From 080a7c4f3e1e6b1f293aac7e733ecd017e980f2e Mon Sep 17 00:00:00 2001 From: Miguel Angel Ajo Pelayo Date: Wed, 21 Jan 2026 22:14:28 +0100 Subject: [PATCH 01/30] Initial commit: monorepo creation From 614b1ebbe1357a65ec0cf9de57daa5cf9b90cee5 Mon Sep 17 00:00:00 2001 From: Miguel Angel Ajo Pelayo Date: Wed, 21 Jan 2026 22:14:30 +0100 Subject: [PATCH 02/30] Fix Python package paths for monorepo structure Update pyproject.toml files to adjust raw-options root paths from '../..' to '../../..' to account for monorepo subdirectory. --- .gitignore | 27 ++ Makefile | 178 ++++++++ README.md | 172 ++++++++ import_pr.sh | 385 ++++++++++++++++++ .../__templates__/driver/pyproject.toml.tmpl | 2 +- .../packages/jumpstarter-all/pyproject.toml | 2 +- .../jumpstarter-cli-admin/pyproject.toml | 2 +- .../jumpstarter-cli-common/pyproject.toml | 2 +- .../jumpstarter-cli-driver/pyproject.toml | 2 +- .../packages/jumpstarter-cli/pyproject.toml | 2 +- .../jumpstarter-driver-ble/pyproject.toml | 2 +- .../jumpstarter-driver-can/pyproject.toml | 2 +- .../pyproject.toml | 2 +- .../pyproject.toml | 2 +- .../jumpstarter-driver-dutlink/pyproject.toml | 2 +- .../pyproject.toml | 2 +- .../pyproject.toml | 2 +- .../jumpstarter-driver-gpiod/pyproject.toml | 2 +- .../pyproject.toml | 2 +- .../jumpstarter-driver-http/pyproject.toml | 2 +- .../jumpstarter-driver-iscsi/pyproject.toml | 2 +- .../jumpstarter-driver-network/pyproject.toml | 2 +- .../jumpstarter-driver-opendal/pyproject.toml | 2 +- .../jumpstarter-driver-power/pyproject.toml | 2 +- .../pyproject.toml | 2 +- .../pyproject.toml | 2 +- .../jumpstarter-driver-qemu/pyproject.toml | 2 +- .../jumpstarter-driver-ridesx/pyproject.toml | 2 +- .../jumpstarter-driver-sdwire/pyproject.toml | 2 +- .../jumpstarter-driver-shell/pyproject.toml | 2 +- .../jumpstarter-driver-snmp/pyproject.toml | 2 +- .../pyproject.toml | 2 +- .../jumpstarter-driver-ssh/pyproject.toml | 2 +- .../jumpstarter-driver-tasmota/pyproject.toml | 2 +- 
.../jumpstarter-driver-tftp/pyproject.toml | 2 +- .../jumpstarter-driver-tmt/pyproject.toml | 2 +- .../jumpstarter-driver-uboot/pyproject.toml | 2 +- .../pyproject.toml | 2 +- .../jumpstarter-driver-vnc/pyproject.toml | 2 +- .../jumpstarter-driver-yepkit/pyproject.toml | 2 +- .../jumpstarter-imagehash/pyproject.toml | 2 +- .../jumpstarter-kubernetes/pyproject.toml | 2 +- .../jumpstarter-protocol/pyproject.toml | 2 +- .../jumpstarter-testing/pyproject.toml | 2 +- python/packages/jumpstarter/pyproject.toml | 2 +- typos.toml | 33 ++ 46 files changed, 836 insertions(+), 41 deletions(-) create mode 100644 .gitignore create mode 100644 Makefile create mode 100644 README.md create mode 100755 import_pr.sh create mode 100644 typos.toml diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..a746eb15 --- /dev/null +++ b/.gitignore @@ -0,0 +1,27 @@ +# E2E test artifacts and local configuration +.e2e-setup-complete +.e2e/ +.bats/ +ca.pem +ca-key.pem +ca.csr +server.pem +server-key.pem +server.csr + +# Python +.venv/ +__pycache__/ +*.pyc +*.pyo +*.egg-info/ +dist/ +build/ + +# Editor/IDE +.vscode/ +.idea/ +*.swp +*.swo +*~ +.DS_Store diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..a53d5ed3 --- /dev/null +++ b/Makefile @@ -0,0 +1,178 @@ +# Jumpstarter Monorepo Makefile +# +# This Makefile provides common targets that delegate to subdirectory Makefiles. 
+# + +# Subdirectories containing projects +SUBDIRS := python protocol controller e2e + +# Default target +.PHONY: all +all: build + +# Help target - shows available commands +.PHONY: help +help: + @echo "Jumpstarter Monorepo" + @echo "" + @echo "Available targets:" + @echo " make all - Build all projects (default)" + @echo " make build - Build all projects" + @echo " make test - Run tests in all projects" + @echo " make clean - Clean build artifacts in all projects" + @echo " make lint - Run linters in all projects" + @echo " make fmt - Format code in all projects" + @echo "" + @echo "End-to-end testing:" + @echo " make e2e-setup - Setup e2e test environment (one-time)" + @echo " make e2e-run - Run e2e tests (requires e2e-setup first)" + @echo " make e2e - Same as e2e-run" + @echo " make e2e-full - Full setup + run (for CI or first time)" + @echo " make e2e-clean - Clean up e2e test environment (delete cluster, certs, etc.)" + @echo "" + @echo "Per-project targets:" + @echo " make build- - Build specific project" + @echo " make test- - Test specific project" + @echo " make clean- - Clean specific project" + @echo "" + @echo "Projects: $(SUBDIRS)" + +# Build all projects +.PHONY: build +build: + @for dir in $(SUBDIRS); do \ + if [ -f $$dir/Makefile ]; then \ + echo "Building $$dir..."; \ + $(MAKE) -C $$dir build || true; \ + fi \ + done + +# Test all projects +.PHONY: test +test: + @for dir in $(SUBDIRS); do \ + if [ -f $$dir/Makefile ]; then \ + echo "Testing $$dir..."; \ + $(MAKE) -C $$dir test ; \ + fi \ + done + +# Clean all projects +.PHONY: clean +clean: + @for dir in $(SUBDIRS); do \ + if [ -f $$dir/Makefile ]; then \ + echo "Cleaning $$dir..."; \ + $(MAKE) -C $$dir clean || true; \ + fi \ + done + +# Lint all projects +.PHONY: lint +lint: + @for dir in $(SUBDIRS); do \ + if [ -f $$dir/Makefile ]; then \ + echo "Linting $$dir..."; \ + $(MAKE) -C $$dir lint; \ + fi \ + done + +# Format all projects +.PHONY: fmt +fmt: + @for dir in $(SUBDIRS); do \ + if [ -f 
$$dir/Makefile ]; then \ + echo "Formatting $$dir..."; \ + $(MAKE) -C $$dir fmt || true; \ + fi \ + done + +# Per-project build targets +.PHONY: build-python build-protocol build-controller build-e2e +build-python: + @if [ -f python/Makefile ]; then $(MAKE) -C python build; fi + +build-protocol: + @if [ -f protocol/Makefile ]; then $(MAKE) -C protocol build; fi + +build-controller: + @if [ -f controller/Makefile ]; then $(MAKE) -C controller build; fi + +build-e2e: + @if [ -f e2e/Makefile ]; then $(MAKE) -C e2e build; fi + +# Per-project test targets +.PHONY: test-python test-protocol test-controller test-e2e +test-python: + @if [ -f python/Makefile ]; then $(MAKE) -C python test; fi + +test-protocol: + @if [ -f protocol/Makefile ]; then $(MAKE) -C protocol test; fi + +test-controller: + @if [ -f controller/Makefile ]; then $(MAKE) -C controller test; fi + +# Setup e2e testing environment (one-time) +.PHONY: e2e-setup +e2e-setup: + @echo "Setting up e2e test environment..." + @bash e2e/setup-e2e.sh + +# Run e2e tests +.PHONY: e2e-run +e2e-run: + @echo "Running e2e tests..." + @bash e2e/run-e2e.sh + +# Convenience alias for running e2e tests +.PHONY: e2e +e2e: e2e-run + +# Full e2e setup + run +.PHONY: e2e-full +e2e-full: + @bash e2e/run-e2e.sh --full + +# Clean up e2e test environment +.PHONY: e2e-clean +e2e-clean: + @echo "Cleaning up e2e test environment..." + @if command -v kind >/dev/null 2>&1; then \ + echo "Deleting jumpstarter kind cluster..."; \ + kind delete cluster --name jumpstarter 2>/dev/null || true; \ + fi + @echo "Removing certificates and setup files..." + @rm -f ca.pem ca-key.pem ca.csr server.pem server-key.pem server.csr + @rm -f .e2e-setup-complete + @echo "Removing local e2e configuration directory..." + @rm -rf .e2e + @echo "Removing virtual environment..." + @rm -rf .venv + @echo "Removing local bats libraries..." 
+ @rm -rf .bats + @if [ -d /etc/jumpstarter/exporters ] && [ -w /etc/jumpstarter/exporters ]; then \ + echo "Removing exporter configs..."; \ + rm -rf /etc/jumpstarter/exporters/* 2>/dev/null || true; \ + fi + @echo "โœ“ E2E test environment cleaned" + @echo "" + @echo "Note: You may need to manually remove the dex entry from /etc/hosts:" + @echo " sudo sed -i.bak '/dex.dex.svc.cluster.local/d' /etc/hosts" + +# Backward compatibility alias +.PHONY: test-e2e +test-e2e: e2e-run + +# Per-project clean targets +.PHONY: clean-python clean-protocol clean-controller clean-e2e +clean-python: + @if [ -f python/Makefile ]; then $(MAKE) -C python clean; fi + +clean-protocol: + @if [ -f protocol/Makefile ]; then $(MAKE) -C protocol clean; fi + +clean-controller: + @if [ -f controller/Makefile ]; then $(MAKE) -C controller clean; fi + +clean-e2e: + @if [ -f e2e/Makefile ]; then $(MAKE) -C e2e clean; fi diff --git a/README.md b/README.md new file mode 100644 index 00000000..f50fba91 --- /dev/null +++ b/README.md @@ -0,0 +1,172 @@ +# ![bolt](python/assets/bolt.svg) Jumpstarter + +[![Matrix](https://img.shields.io/matrix/jumpstarter%3Amatrix.org?color=blue)](https://matrix.to/#/#jumpstarter:matrix.org) +[![Etherpad](https://img.shields.io/badge/Etherpad-Notes-blue?logo=etherpad)](https://etherpad.jumpstarter.dev/pad-lister) +[![Community Meeting](https://img.shields.io/badge/Weekly%20Meeting-Google%20Meet-blue?logo=google-meet)](https://meet.google.com/gzd-hhbd-hpu) +![GitHub Release](https://img.shields.io/github/v/release/jumpstarter-dev/jumpstarter) +![PyPI - Version](https://img.shields.io/pypi/v/jumpstarter) +[![Ask DeepWiki](https://deepwiki.com/badge.svg)](https://deepwiki.com/jumpstarter-dev/jumpstarter) + +A free, open source tool for automated testing on real and virtual hardware with +CI/CD integration. Simplify device automation with consistent rules across local +and distributed environments. 
+ +## Highlights + +- ๐Ÿงช **Unified Testing** - One tool for local, virtual, and remote hardware +- ๐Ÿ **Python-Powered** - Leverage Python's testing ecosystem +- ๐Ÿ”Œ **Hardware Abstraction** - Simplify complex hardware interfaces with drivers +- ๐ŸŒ **Collaborative** - Share test hardware globally +- โš™๏ธ **CI/CD Ready** - Works with cloud native developer environments and pipelines +- ๐Ÿ’ป **Cross-Platform** - Supports Linux and macOS + +## Repository Structure + +This monorepo contains all Jumpstarter components: + +| Directory | Description | +|-----------|-------------| +| [`python/`](python/) | Python client, CLI, drivers, and testing framework | +| [`controller/`](controller/) | Kubernetes controller and operator (Jumpstarter Service) | +| [`protocol/`](protocol/) | gRPC protocol definitions (protobuf) | +| [`e2e/`](e2e/) | End-to-end testing infrastructure | + +## Quick Start + +### Install the CLI + +```shell +pip install --extra-index-url https://pkg.jumpstarter.dev/ jumpstarter-cli +``` + +Or install all Python components: + +```shell +pip install --extra-index-url https://pkg.jumpstarter.dev/ jumpstarter-all +``` + +### Deploy the Service + +To install the Jumpstarter Service in your Kubernetes cluster, see the +[Service Installation](https://jumpstarter.dev/main/getting-started/installation/index.html) +documentation. + +## Components + +### Python Client & Drivers (`python/`) + +The Python implementation provides: +- `jmp` CLI tool for interacting with hardware +- Client libraries for test automation +- Hardware drivers for various devices +- Testing framework integration + +See [`python/README.md`](python/README.md) for details. 
+ +### Jumpstarter Service (`controller/`) + +The Kubernetes-native service that provides: +- Centralized hardware management +- Client and exporter routing +- Authentication and authorization +- Multi-tenant support + +**Prerequisites:** +- Kubernetes v1.11.3+ +- kubectl v1.11.3+ + +See [`controller/README.md`](controller/README.md) for deployment instructions. + +### Protocol (`protocol/`) + +The gRPC-based communication layer that enables: +- Unified interface for virtual and physical hardware +- Secure communication over HTTPS +- Tunneling support for Unix sockets, TCP, and UDP +- Flexible topology with direct or routed connections + +See [`protocol/README.md`](protocol/README.md) for details. + +### End-to-End Tests (`e2e/`) + +Comprehensive testing infrastructure for the entire Jumpstarter stack: +- `setup-e2e.sh` - One-time environment setup (auto-installs bats libraries on macOS) +- `run-e2e.sh` - Quick test runner for iterations +- `action.yml` - GitHub Actions composite action for CI/CD +- Full integration tests covering authentication, exporters, and clients + +Run e2e tests locally: +```shell +# First time setup +make e2e-setup + +# Run tests (repeat as needed) +make e2e # or: make e2e-run + +# Or full setup + run in one command +make e2e-full + +# Clean up e2e environment (delete cluster, certs, etc.) 
+make e2e-clean +``` + +## Development + +### Prerequisites + +- Python 3.11+ (for Python components) +- Go 1.22+ (for controller) +- Docker/Podman (for container builds) +- kubectl (for Kubernetes deployment) + +### Building + +```shell +# Build all components +make all + +# Build specific components +make python # Python packages +make controller # Controller binary +make protocol # Generate protocol code + +# Run tests +make test + +# Run end-to-end tests +make e2e-setup # First time only +make e2e # Run tests +make e2e-clean # Clean up +``` + +### Running Locally + +```shell +# Start a local development environment +make dev +``` + +## Documentation + +Jumpstarter's documentation is available at [jumpstarter.dev](https://jumpstarter.dev). + +- [Getting Started](https://jumpstarter.dev/main/getting-started/) +- [User Guide](https://jumpstarter.dev/main/introduction/) +- [API Reference](https://jumpstarter.dev/main/api/) +- [Contributing Guide](https://jumpstarter.dev/main/contributing.html) + +## Contributing + +Jumpstarter welcomes contributors of all levels of experience! See the +[contributing guide](https://jumpstarter.dev/main/contributing.html) to get started. + +### Community + +- [Matrix Chat](https://matrix.to/#/#jumpstarter:matrix.org) +- [Weekly Meeting](https://meet.google.com/gzd-hhbd-hpu) +- [Meeting Notes](https://etherpad.jumpstarter.dev/pad-lister) + +## License + +Jumpstarter is licensed under the Apache 2.0 License ([LICENSE](LICENSE) or +[https://www.apache.org/licenses/LICENSE-2.0](https://www.apache.org/licenses/LICENSE-2.0)). diff --git a/import_pr.sh b/import_pr.sh new file mode 100755 index 00000000..c945f37b --- /dev/null +++ b/import_pr.sh @@ -0,0 +1,385 @@ +#!/bin/bash +# +# import_pr.sh +# +# Imports a PR from an upstream Jumpstarter repository into the monorepo. +# This script fetches PR commits, generates patches, and applies them with +# the correct directory prefix for the monorepo structure. 
+# +# Usage: ./import_pr.sh +# +# Arguments: +# repo - One of: python, protocol, controller, e2e +# pr_number - The PR number from the upstream repository +# +# Example: +# ./import_pr.sh python 123 +# ./import_pr.sh controller 45 +# + +set -e + +# Configuration +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +TEMP_DIR="${SCRIPT_DIR}/.import-pr-temp" +PATCH_DIR="${TEMP_DIR}/patches" + +# Repository mappings: repo_name -> "github_repo subdir" +declare -A REPO_MAP=( + ["python"]="jumpstarter-dev/jumpstarter python" + ["protocol"]="jumpstarter-dev/jumpstarter-protocol protocol" + ["controller"]="jumpstarter-dev/jumpstarter-controller controller" + ["e2e"]="jumpstarter-dev/jumpstarter-e2e e2e" +) + +# Colors for output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +NC='\033[0m' # No Color + +# Logging functions +log_info() { + echo -e "${GREEN}[INFO]${NC} $1" +} + +log_warn() { + echo -e "${YELLOW}[WARN]${NC} $1" +} + +log_error() { + echo -e "${RED}[ERROR]${NC} $1" +} + +log_step() { + echo -e "${BLUE}[STEP]${NC} $1" +} + +# Cleanup function +cleanup() { + local exit_code=$? + if [ -d "${TEMP_DIR}" ]; then + log_info "Cleaning up temporary directory..." + rm -rf "${TEMP_DIR}" + fi + if [ $exit_code -ne 0 ]; then + log_warn "Script exited with errors. Any partial changes may need to be reverted." + fi +} + +trap cleanup EXIT + +# Print usage +usage() { + echo "Usage: $0 " + echo "" + echo "Import a PR from an upstream repository into the monorepo." 
+ echo "" + echo "Arguments:" + echo " repo - One of: python, protocol, controller, e2e" + echo " pr_number - The PR number from the upstream repository" + echo "" + echo "Examples:" + echo " $0 python 123 # Import PR #123 from jumpstarter repo" + echo " $0 controller 45 # Import PR #45 from controller repo" + echo "" + echo "Repository mappings:" + echo " python -> jumpstarter-dev/jumpstarter" + echo " protocol -> jumpstarter-dev/jumpstarter-protocol" + echo " controller -> jumpstarter-dev/jumpstarter-controller" + echo " e2e -> jumpstarter-dev/jumpstarter-e2e" + exit 1 +} + +# Check dependencies +check_dependencies() { + log_step "Checking dependencies..." + + if ! command -v git &> /dev/null; then + log_error "git is not installed. Please install git first." + exit 1 + fi + + if ! command -v gh &> /dev/null; then + log_error "gh (GitHub CLI) is not installed." + echo "Install it from: https://cli.github.com/" + exit 1 + fi + + # Check if gh is authenticated + if ! gh auth status &> /dev/null; then + log_error "gh is not authenticated. Please run 'gh auth login' first." + exit 1 + fi + + log_info "All dependencies found." +} + +# Validate arguments +validate_args() { + if [ $# -lt 2 ]; then + log_error "Missing arguments." + usage + fi + + local repo="$1" + local pr_number="$2" + + # Validate repo name + if [ -z "${REPO_MAP[$repo]}" ]; then + log_error "Invalid repository name: ${repo}" + echo "Valid options are: python, protocol, controller, e2e" + exit 1 + fi + + # Validate PR number is numeric + if ! [[ "$pr_number" =~ ^[0-9]+$ ]]; then + log_error "PR number must be a positive integer: ${pr_number}" + exit 1 + fi +} + +# Fetch PR information +fetch_pr_info() { + local github_repo="$1" + local pr_number="$2" + + log_step "Fetching PR #${pr_number} info from ${github_repo}..." 
+ + # Get PR details as JSON + local pr_json + pr_json=$(gh pr view "${pr_number}" --repo "${github_repo}" --json title,baseRefName,headRefName,commits,state 2>&1) || { + log_error "Failed to fetch PR #${pr_number} from ${github_repo}" + echo "Make sure the PR exists and you have access to the repository." + exit 1 + } + + # Extract fields + PR_TITLE=$(echo "$pr_json" | jq -r '.title') + PR_BASE_BRANCH=$(echo "$pr_json" | jq -r '.baseRefName') + PR_HEAD_BRANCH=$(echo "$pr_json" | jq -r '.headRefName') + PR_COMMIT_COUNT=$(echo "$pr_json" | jq '.commits | length') + PR_STATE=$(echo "$pr_json" | jq -r '.state') + + log_info "PR Title: ${PR_TITLE}" + log_info "Base Branch: ${PR_BASE_BRANCH}" + log_info "Head Branch: ${PR_HEAD_BRANCH}" + log_info "Commits: ${PR_COMMIT_COUNT}" + log_info "State: ${PR_STATE}" +} + +# Clone repository and checkout PR +clone_and_checkout_pr() { + local github_repo="$1" + local pr_number="$2" + + log_step "Cloning repository and checking out PR..." + + # Create temp directory + mkdir -p "${TEMP_DIR}" + mkdir -p "${PATCH_DIR}" + + local clone_dir="${TEMP_DIR}/repo" + + # Clone the repository + log_info "Cloning ${github_repo}..." + gh repo clone "${github_repo}" "${clone_dir}" -- --depth=1 --no-single-branch 2>/dev/null || { + # If shallow clone fails, try full clone + gh repo clone "${github_repo}" "${clone_dir}" + } + + cd "${clone_dir}" + + # Checkout the PR + log_info "Checking out PR #${pr_number}..." + gh pr checkout "${pr_number}" --repo "${github_repo}" + + # Fetch the base branch to ensure we have it + log_info "Fetching base branch (${PR_BASE_BRANCH})..." + git fetch origin "${PR_BASE_BRANCH}" + + CLONE_DIR="${clone_dir}" +} + +# Generate patches for PR commits +generate_patches() { + log_step "Generating patches..." 
+ + cd "${CLONE_DIR}" + + # Find the merge base between the PR branch and the base branch + local merge_base + merge_base=$(git merge-base "origin/${PR_BASE_BRANCH}" HEAD) + + log_info "Merge base: ${merge_base}" + + # Count commits to be patched + local commit_count + commit_count=$(git rev-list --count "${merge_base}..HEAD") + log_info "Commits to import: ${commit_count}" + + if [ "$commit_count" -eq 0 ]; then + log_error "No commits found between merge base and HEAD." + exit 1 + fi + + # Generate patches + git format-patch -o "${PATCH_DIR}" "${merge_base}..HEAD" + + # Count generated patches + PATCH_COUNT=$(ls -1 "${PATCH_DIR}"/*.patch 2>/dev/null | wc -l | tr -d ' ') + log_info "Generated ${PATCH_COUNT} patch file(s)." +} + +# Apply patches to monorepo +apply_patches() { + local subdir="$1" + local repo_name="$2" + local pr_number="$3" + + log_step "Applying patches to monorepo..." + + cd "${SCRIPT_DIR}" + + # Create branch name + local branch_name="import/${repo_name}-pr-${pr_number}" + + # Check if we're in a git repository + if ! git rev-parse --git-dir &> /dev/null; then + log_error "Not in a git repository. Please run this script from the monorepo root." + exit 1 + fi + + # Check for uncommitted changes + if ! git diff --quiet || ! git diff --cached --quiet; then + log_error "You have uncommitted changes. Please commit or stash them first." + exit 1 + fi + + # Check if branch already exists + if git show-ref --verify --quiet "refs/heads/${branch_name}"; then + log_error "Branch '${branch_name}' already exists." 
+ echo "Delete it with: git branch -D ${branch_name}" + exit 1 + fi + + # Create and checkout new branch + log_info "Creating branch: ${branch_name}" + git checkout -b "${branch_name}" + + # Apply patches with directory prefix + log_info "Applying patches with directory prefix: ${subdir}/" + + local patch_files=("${PATCH_DIR}"/*.patch) + local applied=0 + local failed=0 + + for patch in "${patch_files[@]}"; do + if [ -f "$patch" ]; then + local patch_name + patch_name=$(basename "$patch") + if git am --directory="${subdir}" "$patch" 2>/dev/null; then + log_info "Applied: ${patch_name}" + ((applied++)) + else + log_error "Failed to apply: ${patch_name}" + ((failed++)) + # Abort the am session + git am --abort 2>/dev/null || true + break + fi + fi + done + + if [ "$failed" -gt 0 ]; then + log_error "Failed to apply ${failed} patch(es)." + echo "" + echo "The patch may have conflicts. You can try to resolve them manually:" + echo " 1. git checkout main" + echo " 2. git branch -D ${branch_name}" + echo " 3. Manually apply the changes from the upstream PR" + exit 1 + fi + + APPLIED_COUNT=$applied +} + +# Print success message and next steps +print_success() { + local repo_name="$1" + local pr_number="$2" + local github_repo="$3" + local branch_name="import/${repo_name}-pr-${pr_number}" + + echo "" + echo -e "${GREEN}========================================${NC}" + echo -e "${GREEN} PR Import Successful!${NC}" + echo -e "${GREEN}========================================${NC}" + echo "" + echo "Summary:" + echo " - Source: ${github_repo}#${pr_number}" + echo " - Title: ${PR_TITLE}" + echo " - Branch: ${branch_name}" + echo " - Commits applied: ${APPLIED_COUNT}" + echo "" + echo "Next steps:" + echo " 1. Review the imported commits:" + echo " git log --oneline main..HEAD" + echo "" + echo " 2. 
Push the branch and create a PR on the monorepo:" + echo " git push -u origin ${branch_name}" + echo " gh pr create --title \"${PR_TITLE}\" --body \"Imported from ${github_repo}#${pr_number}\"" + echo "" + echo " 3. Or if you need to make changes first:" + echo " # Make your changes" + echo " git add -A && git commit --amend" + echo "" +} + +# Main execution +main() { + local repo_name="$1" + local pr_number="$2" + + echo "" + log_info "Starting PR import: ${repo_name} #${pr_number}" + echo "" + + # Validate arguments + validate_args "$@" + + # Check dependencies + check_dependencies + echo "" + + # Parse repo mapping + local repo_info="${REPO_MAP[$repo_name]}" + local github_repo subdir + read -r github_repo subdir <<< "${repo_info}" + + log_info "GitHub Repo: ${github_repo}" + log_info "Monorepo Subdir: ${subdir}/" + echo "" + + # Fetch PR info + fetch_pr_info "${github_repo}" "${pr_number}" + echo "" + + # Clone and checkout PR + clone_and_checkout_pr "${github_repo}" "${pr_number}" + echo "" + + # Generate patches + generate_patches + echo "" + + # Apply patches to monorepo + apply_patches "${subdir}" "${repo_name}" "${pr_number}" + echo "" + + # Print success message + print_success "${repo_name}" "${pr_number}" "${github_repo}" +} + +main "$@" diff --git a/python/__templates__/driver/pyproject.toml.tmpl b/python/__templates__/driver/pyproject.toml.tmpl index e7dbb11e..71d1643c 100644 --- a/python/__templates__/driver/pyproject.toml.tmpl +++ b/python/__templates__/driver/pyproject.toml.tmpl @@ -15,7 +15,7 @@ dependencies = [ [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../'} +raw-options = { 'root' = '../../../'} [tool.hatch.metadata.hooks.vcs.urls] Homepage = "https://jumpstarter.dev" diff --git a/python/packages/jumpstarter-all/pyproject.toml b/python/packages/jumpstarter-all/pyproject.toml index a071b28c..036e4ee4 100644 --- a/python/packages/jumpstarter-all/pyproject.toml +++ b/python/packages/jumpstarter-all/pyproject.toml @@ -52,7 
+52,7 @@ source_archive = "https://github.com/jumpstarter-dev/repo/archive/{commit_hash}. [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [build-system] requires = ["hatchling", "hatch-vcs", "hatch-pin-jumpstarter"] diff --git a/python/packages/jumpstarter-cli-admin/pyproject.toml b/python/packages/jumpstarter-cli-admin/pyproject.toml index c8278da5..886d9767 100644 --- a/python/packages/jumpstarter-cli-admin/pyproject.toml +++ b/python/packages/jumpstarter-cli-admin/pyproject.toml @@ -32,7 +32,7 @@ source_archive = "https://github.com/jumpstarter-dev/repo/archive/{commit_hash}. [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [build-system] requires = ["hatchling", "hatch-vcs", "hatch-pin-jumpstarter"] diff --git a/python/packages/jumpstarter-cli-common/pyproject.toml b/python/packages/jumpstarter-cli-common/pyproject.toml index a52e3d0c..e892545f 100644 --- a/python/packages/jumpstarter-cli-common/pyproject.toml +++ b/python/packages/jumpstarter-cli-common/pyproject.toml @@ -34,7 +34,7 @@ source_archive = "https://github.com/jumpstarter-dev/repo/archive/{commit_hash}. [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [build-system] requires = ["hatchling", "hatch-vcs", "hatch-pin-jumpstarter"] diff --git a/python/packages/jumpstarter-cli-driver/pyproject.toml b/python/packages/jumpstarter-cli-driver/pyproject.toml index c1856a50..db35f025 100644 --- a/python/packages/jumpstarter-cli-driver/pyproject.toml +++ b/python/packages/jumpstarter-cli-driver/pyproject.toml @@ -32,7 +32,7 @@ source_archive = "https://github.com/jumpstarter-dev/repo/archive/{commit_hash}. 
[tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [build-system] requires = ["hatchling", "hatch-vcs", "hatch-pin-jumpstarter"] diff --git a/python/packages/jumpstarter-cli/pyproject.toml b/python/packages/jumpstarter-cli/pyproject.toml index 8d8afa4a..f05a03e0 100644 --- a/python/packages/jumpstarter-cli/pyproject.toml +++ b/python/packages/jumpstarter-cli/pyproject.toml @@ -37,7 +37,7 @@ source_archive = "https://github.com/jumpstarter-dev/repo/archive/{commit_hash}. [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [build-system] requires = ["hatchling", "hatch-vcs", "hatch-pin-jumpstarter"] diff --git a/python/packages/jumpstarter-driver-ble/pyproject.toml b/python/packages/jumpstarter-driver-ble/pyproject.toml index 6f03d3e7..13d50eeb 100644 --- a/python/packages/jumpstarter-driver-ble/pyproject.toml +++ b/python/packages/jumpstarter-driver-ble/pyproject.toml @@ -14,7 +14,7 @@ dependencies = [ [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [tool.hatch.metadata.hooks.vcs.urls] Homepage = "https://jumpstarter.dev" diff --git a/python/packages/jumpstarter-driver-can/pyproject.toml b/python/packages/jumpstarter-driver-can/pyproject.toml index 97b4b463..c00e8038 100644 --- a/python/packages/jumpstarter-driver-can/pyproject.toml +++ b/python/packages/jumpstarter-driver-can/pyproject.toml @@ -26,7 +26,7 @@ source_archive = "https://github.com/jumpstarter-dev/repo/archive/{commit_hash}. 
[tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [build-system] requires = ["hatchling", "hatch-vcs", "hatch-pin-jumpstarter"] diff --git a/python/packages/jumpstarter-driver-composite/pyproject.toml b/python/packages/jumpstarter-driver-composite/pyproject.toml index bd1e25e5..0fe9e999 100644 --- a/python/packages/jumpstarter-driver-composite/pyproject.toml +++ b/python/packages/jumpstarter-driver-composite/pyproject.toml @@ -24,7 +24,7 @@ source_archive = "https://github.com/jumpstarter-dev/repo/archive/{commit_hash}. [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [build-system] requires = ["hatchling", "hatch-vcs", "hatch-pin-jumpstarter"] diff --git a/python/packages/jumpstarter-driver-corellium/pyproject.toml b/python/packages/jumpstarter-driver-corellium/pyproject.toml index 05810dcc..e0bd6f69 100644 --- a/python/packages/jumpstarter-driver-corellium/pyproject.toml +++ b/python/packages/jumpstarter-driver-corellium/pyproject.toml @@ -27,7 +27,7 @@ source_archive = "https://github.com/jumpstarter-dev/repo/archive/{commit_hash}. [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [build-system] requires = ["hatchling", "hatch-vcs", "hatch-pin-jumpstarter"] diff --git a/python/packages/jumpstarter-driver-dutlink/pyproject.toml b/python/packages/jumpstarter-driver-dutlink/pyproject.toml index acc77f4e..7e81dc4e 100644 --- a/python/packages/jumpstarter-driver-dutlink/pyproject.toml +++ b/python/packages/jumpstarter-driver-dutlink/pyproject.toml @@ -37,7 +37,7 @@ source_archive = "https://github.com/jumpstarter-dev/repo/archive/{commit_hash}. 
[tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [build-system] requires = ["hatchling", "hatch-vcs", "hatch-pin-jumpstarter"] diff --git a/python/packages/jumpstarter-driver-energenie/pyproject.toml b/python/packages/jumpstarter-driver-energenie/pyproject.toml index 0e5f22e1..4aa67ab0 100644 --- a/python/packages/jumpstarter-driver-energenie/pyproject.toml +++ b/python/packages/jumpstarter-driver-energenie/pyproject.toml @@ -30,7 +30,7 @@ source_archive = "https://github.com/jumpstarter-dev/repo/archive/{commit_hash}. [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../'} +raw-options = { 'root' = '../../../'} [build-system] requires = ["hatchling", "hatch-vcs"] diff --git a/python/packages/jumpstarter-driver-flashers/pyproject.toml b/python/packages/jumpstarter-driver-flashers/pyproject.toml index 2127a6fa..26db295b 100644 --- a/python/packages/jumpstarter-driver-flashers/pyproject.toml +++ b/python/packages/jumpstarter-driver-flashers/pyproject.toml @@ -23,7 +23,7 @@ dependencies = [ [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [tool.hatch.metadata.hooks.vcs.urls] Homepage = "https://jumpstarter.dev" diff --git a/python/packages/jumpstarter-driver-gpiod/pyproject.toml b/python/packages/jumpstarter-driver-gpiod/pyproject.toml index 5fb933d8..ff5ca6ed 100644 --- a/python/packages/jumpstarter-driver-gpiod/pyproject.toml +++ b/python/packages/jumpstarter-driver-gpiod/pyproject.toml @@ -26,7 +26,7 @@ source_archive = "https://github.com/jumpstarter-dev/repo/archive/{commit_hash}. 
[tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [build-system] requires = ["hatchling", "hatch-vcs", "hatch-pin-jumpstarter"] diff --git a/python/packages/jumpstarter-driver-http-power/pyproject.toml b/python/packages/jumpstarter-driver-http-power/pyproject.toml index 5791f9a2..39b5f8cb 100644 --- a/python/packages/jumpstarter-driver-http-power/pyproject.toml +++ b/python/packages/jumpstarter-driver-http-power/pyproject.toml @@ -17,7 +17,7 @@ dependencies = [ [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [tool.hatch.metadata.hooks.vcs.urls] Homepage = "https://jumpstarter.dev" diff --git a/python/packages/jumpstarter-driver-http/pyproject.toml b/python/packages/jumpstarter-driver-http/pyproject.toml index 55d6053f..06cd3553 100644 --- a/python/packages/jumpstarter-driver-http/pyproject.toml +++ b/python/packages/jumpstarter-driver-http/pyproject.toml @@ -16,7 +16,7 @@ dependencies = [ [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [tool.hatch.metadata.hooks.vcs.urls] Homepage = "https://jumpstarter.dev" diff --git a/python/packages/jumpstarter-driver-iscsi/pyproject.toml b/python/packages/jumpstarter-driver-iscsi/pyproject.toml index e8ee9cc7..a591b6a0 100644 --- a/python/packages/jumpstarter-driver-iscsi/pyproject.toml +++ b/python/packages/jumpstarter-driver-iscsi/pyproject.toml @@ -17,7 +17,7 @@ dependencies = [ [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [tool.hatch.metadata.hooks.vcs.urls] Homepage = "https://jumpstarter.dev" diff --git a/python/packages/jumpstarter-driver-network/pyproject.toml b/python/packages/jumpstarter-driver-network/pyproject.toml index fcf69512..b0259510 100644 --- a/python/packages/jumpstarter-driver-network/pyproject.toml +++ 
b/python/packages/jumpstarter-driver-network/pyproject.toml @@ -46,7 +46,7 @@ source_archive = "https://github.com/jumpstarter-dev/repo/archive/{commit_hash}. [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [build-system] requires = ["hatchling", "hatch-vcs", "hatch-pin-jumpstarter"] diff --git a/python/packages/jumpstarter-driver-opendal/pyproject.toml b/python/packages/jumpstarter-driver-opendal/pyproject.toml index 316e1810..7fe90c9f 100644 --- a/python/packages/jumpstarter-driver-opendal/pyproject.toml +++ b/python/packages/jumpstarter-driver-opendal/pyproject.toml @@ -23,7 +23,7 @@ source_archive = "https://github.com/jumpstarter-dev/repo/archive/{commit_hash}. [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [build-system] requires = ["hatchling", "hatch-vcs", "hatch-pin-jumpstarter"] diff --git a/python/packages/jumpstarter-driver-power/pyproject.toml b/python/packages/jumpstarter-driver-power/pyproject.toml index 82ae4d2b..1106dded 100644 --- a/python/packages/jumpstarter-driver-power/pyproject.toml +++ b/python/packages/jumpstarter-driver-power/pyproject.toml @@ -23,7 +23,7 @@ source_archive = "https://github.com/jumpstarter-dev/repo/archive/{commit_hash}. 
[tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [build-system] requires = ["hatchling", "hatch-vcs", "hatch-pin-jumpstarter"] diff --git a/python/packages/jumpstarter-driver-probe-rs/pyproject.toml b/python/packages/jumpstarter-driver-probe-rs/pyproject.toml index 16243c0b..56fe83c9 100644 --- a/python/packages/jumpstarter-driver-probe-rs/pyproject.toml +++ b/python/packages/jumpstarter-driver-probe-rs/pyproject.toml @@ -15,7 +15,7 @@ dependencies = [ [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [tool.hatch.metadata.hooks.vcs.urls] Homepage = "https://jumpstarter.dev" diff --git a/python/packages/jumpstarter-driver-pyserial/pyproject.toml b/python/packages/jumpstarter-driver-pyserial/pyproject.toml index 24b8db14..20792b73 100644 --- a/python/packages/jumpstarter-driver-pyserial/pyproject.toml +++ b/python/packages/jumpstarter-driver-pyserial/pyproject.toml @@ -31,7 +31,7 @@ source_archive = "https://github.com/jumpstarter-dev/repo/archive/{commit_hash}. [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [build-system] requires = ["hatchling", "hatch-vcs", "hatch-pin-jumpstarter"] diff --git a/python/packages/jumpstarter-driver-qemu/pyproject.toml b/python/packages/jumpstarter-driver-qemu/pyproject.toml index c44f78ec..3c77f05e 100644 --- a/python/packages/jumpstarter-driver-qemu/pyproject.toml +++ b/python/packages/jumpstarter-driver-qemu/pyproject.toml @@ -30,7 +30,7 @@ source_archive = "https://github.com/jumpstarter-dev/repo/archive/{commit_hash}. 
[tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [tool.uv.sources] jumpstarter-driver-opendal = { workspace = true } diff --git a/python/packages/jumpstarter-driver-ridesx/pyproject.toml b/python/packages/jumpstarter-driver-ridesx/pyproject.toml index 567d25c9..29bd45a2 100644 --- a/python/packages/jumpstarter-driver-ridesx/pyproject.toml +++ b/python/packages/jumpstarter-driver-ridesx/pyproject.toml @@ -16,7 +16,7 @@ dependencies = [ [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [tool.hatch.metadata.hooks.vcs.urls] Homepage = "https://jumpstarter.dev" diff --git a/python/packages/jumpstarter-driver-sdwire/pyproject.toml b/python/packages/jumpstarter-driver-sdwire/pyproject.toml index 9333a68e..1b50ab47 100644 --- a/python/packages/jumpstarter-driver-sdwire/pyproject.toml +++ b/python/packages/jumpstarter-driver-sdwire/pyproject.toml @@ -25,7 +25,7 @@ source_archive = "https://github.com/jumpstarter-dev/repo/archive/{commit_hash}. [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [build-system] requires = ["hatchling", "hatch-vcs", "hatch-pin-jumpstarter"] diff --git a/python/packages/jumpstarter-driver-shell/pyproject.toml b/python/packages/jumpstarter-driver-shell/pyproject.toml index ca264127..a866cfc5 100644 --- a/python/packages/jumpstarter-driver-shell/pyproject.toml +++ b/python/packages/jumpstarter-driver-shell/pyproject.toml @@ -27,7 +27,7 @@ source_archive = "https://github.com/jumpstarter-dev/repo/archive/{commit_hash}. 
[tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [build-system] requires = ["hatchling", "hatch-vcs", "hatch-pin-jumpstarter"] diff --git a/python/packages/jumpstarter-driver-snmp/pyproject.toml b/python/packages/jumpstarter-driver-snmp/pyproject.toml index 9cb1a54b..192edcb1 100644 --- a/python/packages/jumpstarter-driver-snmp/pyproject.toml +++ b/python/packages/jumpstarter-driver-snmp/pyproject.toml @@ -37,7 +37,7 @@ source_archive = "https://github.com/jumpstarter-dev/repo/archive/{commit_hash}. [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [build-system] requires = ["hatchling", "hatch-vcs", "hatch-pin-jumpstarter"] diff --git a/python/packages/jumpstarter-driver-ssh-mitm/pyproject.toml b/python/packages/jumpstarter-driver-ssh-mitm/pyproject.toml index 9d24fd79..4d4ca9f2 100644 --- a/python/packages/jumpstarter-driver-ssh-mitm/pyproject.toml +++ b/python/packages/jumpstarter-driver-ssh-mitm/pyproject.toml @@ -20,7 +20,7 @@ ssh_mitm = "jumpstarter_driver_ssh_mitm" [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../'} +raw-options = { 'root' = '../../../'} [tool.hatch.metadata.hooks.vcs.urls] Homepage = "https://jumpstarter.dev" diff --git a/python/packages/jumpstarter-driver-ssh/pyproject.toml b/python/packages/jumpstarter-driver-ssh/pyproject.toml index ee557bc9..e195155a 100644 --- a/python/packages/jumpstarter-driver-ssh/pyproject.toml +++ b/python/packages/jumpstarter-driver-ssh/pyproject.toml @@ -18,7 +18,7 @@ dependencies = [ [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../'} +raw-options = { 'root' = '../../../'} [tool.hatch.metadata.hooks.vcs.urls] Homepage = "https://jumpstarter.dev" diff --git a/python/packages/jumpstarter-driver-tasmota/pyproject.toml b/python/packages/jumpstarter-driver-tasmota/pyproject.toml index ae32a119..696d957c 100644 --- 
a/python/packages/jumpstarter-driver-tasmota/pyproject.toml +++ b/python/packages/jumpstarter-driver-tasmota/pyproject.toml @@ -19,7 +19,7 @@ TasmotaPower = "jumpstarter_driver_tasmota.driver:TasmotaPower" [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [tool.hatch.metadata.hooks.vcs.urls] Homepage = "https://jumpstarter.dev" diff --git a/python/packages/jumpstarter-driver-tftp/pyproject.toml b/python/packages/jumpstarter-driver-tftp/pyproject.toml index bb3429a7..ddf802da 100644 --- a/python/packages/jumpstarter-driver-tftp/pyproject.toml +++ b/python/packages/jumpstarter-driver-tftp/pyproject.toml @@ -25,7 +25,7 @@ dev = [ [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [tool.hatch.metadata.hooks.vcs.urls] Homepage = "https://jumpstarter.dev" diff --git a/python/packages/jumpstarter-driver-tmt/pyproject.toml b/python/packages/jumpstarter-driver-tmt/pyproject.toml index 30278746..6db0f89d 100644 --- a/python/packages/jumpstarter-driver-tmt/pyproject.toml +++ b/python/packages/jumpstarter-driver-tmt/pyproject.toml @@ -17,7 +17,7 @@ dependencies = [ [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../'} +raw-options = { 'root' = '../../../'} [tool.hatch.metadata.hooks.vcs.urls] Homepage = "https://jumpstarter.dev" diff --git a/python/packages/jumpstarter-driver-uboot/pyproject.toml b/python/packages/jumpstarter-driver-uboot/pyproject.toml index 39f66c28..e08af362 100644 --- a/python/packages/jumpstarter-driver-uboot/pyproject.toml +++ b/python/packages/jumpstarter-driver-uboot/pyproject.toml @@ -28,7 +28,7 @@ source_archive = "https://github.com/jumpstarter-dev/repo/archive/{commit_hash}. 
[tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [tool.uv.sources] jumpstarter-driver-composite = { workspace = true } diff --git a/python/packages/jumpstarter-driver-ustreamer/pyproject.toml b/python/packages/jumpstarter-driver-ustreamer/pyproject.toml index ed239575..aeae7284 100644 --- a/python/packages/jumpstarter-driver-ustreamer/pyproject.toml +++ b/python/packages/jumpstarter-driver-ustreamer/pyproject.toml @@ -21,7 +21,7 @@ source_archive = "https://github.com/jumpstarter-dev/repo/archive/{commit_hash}. [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [build-system] requires = ["hatchling", "hatch-vcs", "hatch-pin-jumpstarter"] diff --git a/python/packages/jumpstarter-driver-vnc/pyproject.toml b/python/packages/jumpstarter-driver-vnc/pyproject.toml index 49ce3545..93aabbcd 100644 --- a/python/packages/jumpstarter-driver-vnc/pyproject.toml +++ b/python/packages/jumpstarter-driver-vnc/pyproject.toml @@ -21,7 +21,7 @@ vnc = "jumpstarter_driver_vnc.driver:Vnc" [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../'} +raw-options = { 'root' = '../../../'} [tool.hatch.metadata.hooks.vcs.urls] Homepage = "https://jumpstarter.dev" diff --git a/python/packages/jumpstarter-driver-yepkit/pyproject.toml b/python/packages/jumpstarter-driver-yepkit/pyproject.toml index 13607a5d..a90ae2df 100644 --- a/python/packages/jumpstarter-driver-yepkit/pyproject.toml +++ b/python/packages/jumpstarter-driver-yepkit/pyproject.toml @@ -19,7 +19,7 @@ Ykush = "jumpstarter_driver_yepkit.driver:Ykush" [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [tool.hatch.metadata.hooks.vcs.urls] Homepage = "https://jumpstarter.dev" diff --git a/python/packages/jumpstarter-imagehash/pyproject.toml b/python/packages/jumpstarter-imagehash/pyproject.toml index 92e13e67..3227bfd0 100644 --- 
a/python/packages/jumpstarter-imagehash/pyproject.toml +++ b/python/packages/jumpstarter-imagehash/pyproject.toml @@ -21,7 +21,7 @@ source_archive = "https://github.com/jumpstarter-dev/repo/archive/{commit_hash}. [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [build-system] requires = ["hatchling", "hatch-vcs", "hatch-pin-jumpstarter"] diff --git a/python/packages/jumpstarter-kubernetes/pyproject.toml b/python/packages/jumpstarter-kubernetes/pyproject.toml index 39335b9c..db3a20d6 100644 --- a/python/packages/jumpstarter-kubernetes/pyproject.toml +++ b/python/packages/jumpstarter-kubernetes/pyproject.toml @@ -30,7 +30,7 @@ source_archive = "https://github.com/jumpstarter-dev/repo/archive/{commit_hash}. [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [build-system] requires = ["hatchling", "hatch-vcs", "hatch-pin-jumpstarter"] diff --git a/python/packages/jumpstarter-protocol/pyproject.toml b/python/packages/jumpstarter-protocol/pyproject.toml index 4eaa6fa9..2cbf7e6a 100644 --- a/python/packages/jumpstarter-protocol/pyproject.toml +++ b/python/packages/jumpstarter-protocol/pyproject.toml @@ -29,7 +29,7 @@ source_archive = "https://github.com/jumpstarter-dev/repo/archive/{commit_hash}. [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [build-system] requires = ["hatchling", "hatch-vcs", "hatch-pin-jumpstarter"] diff --git a/python/packages/jumpstarter-testing/pyproject.toml b/python/packages/jumpstarter-testing/pyproject.toml index 6b3f90ac..2b5f84e0 100644 --- a/python/packages/jumpstarter-testing/pyproject.toml +++ b/python/packages/jumpstarter-testing/pyproject.toml @@ -23,7 +23,7 @@ source_archive = "https://github.com/jumpstarter-dev/repo/archive/{commit_hash}. 
[tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [build-system] requires = ["hatchling", "hatch-vcs", "hatch-pin-jumpstarter"] diff --git a/python/packages/jumpstarter/pyproject.toml b/python/packages/jumpstarter/pyproject.toml index 61e54487..a09ce03f 100644 --- a/python/packages/jumpstarter/pyproject.toml +++ b/python/packages/jumpstarter/pyproject.toml @@ -44,7 +44,7 @@ source_archive = "https://github.com/jumpstarter-dev/repo/archive/{commit_hash}. [tool.hatch.version] source = "vcs" -raw-options = { 'root' = '../../' } +raw-options = { 'root' = '../../../' } [build-system] requires = ["hatchling", "hatch-vcs", "hatch-pin-jumpstarter"] diff --git a/typos.toml b/typos.toml new file mode 100644 index 00000000..3cc13976 --- /dev/null +++ b/typos.toml @@ -0,0 +1,33 @@ +# Typos configuration for Jumpstarter monorepo +# https://github.com/crate-ci/typos + +[default] +extend-ignore-re = [ + # Ignore hash strings (like 321ba1) + "[a-f0-9]{6,}", +] + +[default.extend-words] +# ANDed/ORed are valid technical terms (combined with AND/OR operations) +ANDed = "ANDed" +Ded = "Ded" # suffix of ANDed in generated CRD docs +ORed = "ORed" + +# mosquitto is the name of an MQTT broker, not a typo of "mosquito" +mosquitto = "mosquitto" + +# ser is short for "serialize" in variable names like ser_json_timedelta +ser = "ser" + +[type.gomod] +# Exclude go.mod and go.sum from spell checking +extend-glob = ["go.mod", "go.sum"] +check-file = false + +[files] +extend-exclude = [ + # Generated files that shouldn't be spell-checked + "*.lock", + # Vendored dependencies + "vendor/", +] From b0623f71633e1656214fdeb812e3f32f632c5be0 Mon Sep 17 00:00:00 2001 From: Miguel Angel Ajo Pelayo Date: Wed, 21 Jan 2026 22:14:31 +0100 Subject: [PATCH 03/30] Fix multiversion docs script for monorepo Update multiversion.sh to use correct paths with python/ prefix in worktree structure. 
--- python/docs/multiversion.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/python/docs/multiversion.sh b/python/docs/multiversion.sh index 02291d37..10c2f11b 100755 --- a/python/docs/multiversion.sh +++ b/python/docs/multiversion.sh @@ -1,7 +1,7 @@ #!/usr/bin/env bash set -euox pipefail -declare -a BRANCHES=("main" "release-0.5" "release-0.6" "release-0.7") +declare -a BRANCHES=("main" "release-0.6" "release-0.7") # https://stackoverflow.com/a/246128 SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) @@ -15,10 +15,10 @@ for BRANCH in "${BRANCHES[@]}"; do git worktree add --force "${WORKTREE}" "${BRANCH}" - uv run --project "${WORKTREE}" --isolated --all-packages --group docs \ - make -C "${WORKTREE}/docs" html SPHINXOPTS="-D version=${BRANCH}" + uv run --project "${WORKTREE}/python" --isolated --all-packages --group docs \ + make -C "${WORKTREE}/python/docs" html SPHINXOPTS="-D version=${BRANCH}" - cp -r "${WORKTREE}/docs/build/html" "${OUTPUT_DIR}/${BRANCH}" + cp -r "${WORKTREE}/python/docs/build/html" "${OUTPUT_DIR}/${BRANCH}" git worktree remove --force "${WORKTREE}" done From 3f40eecb6a53d9997e2d5ebd617e8a552315237d Mon Sep 17 00:00:00 2001 From: Miguel Angel Ajo Pelayo Date: Wed, 21 Jan 2026 22:14:31 +0100 Subject: [PATCH 04/30] Fix controller and e2e configurations - Update Python container files for monorepo build paths - Copy Kind cluster config with dex nodeport pre-configured - Configure controller and e2e values with certificate placeholder - Patch deploy_with_helm.sh to support EXTRA_VALUES for Helm overlay pattern --- controller/hack/deploy_with_helm.sh | 2 +- controller/hack/kind_cluster.yaml | 17 +++++++++-------- python/.devfile/Containerfile.client | 4 ++-- python/Dockerfile | 4 ++-- 4 files changed, 14 insertions(+), 13 deletions(-) diff --git a/controller/hack/deploy_with_helm.sh b/controller/hack/deploy_with_helm.sh index abdada4b..3a54159a 100755 --- 
a/controller/hack/deploy_with_helm.sh +++ b/controller/hack/deploy_with_helm.sh @@ -51,7 +51,7 @@ helm ${METHOD} --namespace jumpstarter-lab \ --create-namespace \ ${HELM_SETS} \ --set global.timestamp=$(date +%s) \ - --values ./deploy/helm/jumpstarter/values.kind.yaml jumpstarter \ + --values ./deploy/helm/jumpstarter/values.kind.yaml ${EXTRA_VALUES} jumpstarter \ ./deploy/helm/jumpstarter/ kubectl config set-context --current --namespace=jumpstarter-lab diff --git a/controller/hack/kind_cluster.yaml b/controller/hack/kind_cluster.yaml index 6478b336..9cdf74ee 100644 --- a/controller/hack/kind_cluster.yaml +++ b/controller/hack/kind_cluster.yaml @@ -17,24 +17,25 @@ nodes: - containerPort: 80 # ingress controller hostPort: 5080 protocol: TCP - - containerPort: 443 - hostPort: 5443 - protocol: TCP - containerPort: 30010 # grpc nodeport hostPort: 8082 protocol: TCP - - containerPort: 30011 # grpc router nodeport (replica 0) + - containerPort: 30011 # grpc router nodeport hostPort: 8083 protocol: TCP - - containerPort: 30012 # grpc router nodeport (replica 1) + - containerPort: 30012 # grpc router nodeport hostPort: 8084 protocol: TCP - - containerPort: 30013 # grpc router nodeport (replica 2) + - containerPort: 30013 # grpc router nodeport hostPort: 8085 protocol: TCP - + - containerPort: 32000 # dex nodeport + hostPort: 5556 + protocol: TCP + - containerPort: 443 + hostPort: 5443 + protocol: TCP # if we needed to mount a hostPath volume into the kind cluster, we can do it like this # extraMounts: # - hostPath: ./bin/e2e-certs # containerPath: /tmp/e2e-certs - diff --git a/python/.devfile/Containerfile.client b/python/.devfile/Containerfile.client index 6dbba09e..3eabc5df 100644 --- a/python/.devfile/Containerfile.client +++ b/python/.devfile/Containerfile.client @@ -6,7 +6,7 @@ RUN dnf install -y make git && \ rm -rf /var/cache/dnf COPY --from=uv /uv /uvx /bin/ ADD . 
/src -RUN make -C /src build +RUN make -C /src/python build FROM quay.io/devfile/base-developer-image:ubi9-latest @@ -28,7 +28,7 @@ RUN dnf -y install make git python3.12 python3.12 libusbx python3-pyusb python3. USER 10001 -RUN --mount=from=builder,source=/src/dist,target=/dist python3.12 -m pip install /dist/*.whl +RUN --mount=from=builder,source=/src/python/dist,target=/dist python3.12 -m pip install /dist/*.whl RUN python3.12 -m pip install pytest diff --git a/python/Dockerfile b/python/Dockerfile index 5b2ffb48..041f8bc1 100644 --- a/python/Dockerfile +++ b/python/Dockerfile @@ -14,10 +14,10 @@ COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/ FROM builder AS wheels ADD . /src -RUN make -C /src build +RUN make -C /src/python build FROM product -RUN --mount=from=wheels,source=/src/dist,target=/dist \ +RUN --mount=from=wheels,source=/src/python/dist,target=/dist \ uv venv /jumpstarter && \ VIRTUAL_ENV=/jumpstarter uv pip install /dist/*.whl ENV PATH="/jumpstarter/bin:$PATH" From d16861e84ae72b3907f9af776bd4b66185c5e956 Mon Sep 17 00:00:00 2001 From: Miguel Angel Ajo Pelayo Date: Wed, 21 Jan 2026 22:14:31 +0100 Subject: [PATCH 05/30] Configure GitHub Actions and e2e test scripts - Add unified GitHub Actions workflows with path filters - Configure dependabot for all package ecosystems - Remove old .github directories from subdirectories - Install e2e test scripts (setup-e2e.sh, run-e2e.sh, tests.bats) --- .github/dependabot.yml | 34 ++ .../workflows/backport.yaml | 0 .../workflows/build-images.yaml | 75 +++- .../workflows/build-oci-bundle.yaml | 7 +- .../workflows/controller-bundle.yaml | 14 +- .../workflows/controller-kind.yaml | 9 +- .../workflows/controller-tests.yaml | 13 +- .../workflows/documentation.yaml | 11 +- .github/workflows/e2e.yaml | 44 +++ .github/workflows/lint.yaml | 109 ++++++ .../workflows/pr-analytics.yaml | 14 +- .../workflows/python-tests.yaml | 15 +- .../workflows/trigger-packages.yaml | 0 controller/.github/workflows/backport.yaml | 42 
-- controller/.github/workflows/e2e.yaml | 28 -- controller/.github/workflows/lint.yaml | 39 -- controller/typos.toml | 6 - e2e/.github/workflows/selftest.yml | 19 - e2e/action.yml | 99 ----- e2e/run-e2e.sh | 170 ++++++++ e2e/setup-e2e.sh | 364 ++++++++++++++++++ e2e/tests.bats | 61 ++- protocol/.github/workflows/lint.yaml | 20 - python/.github/dependabot.yml | 12 - python/.github/workflows/backport.yml | 41 -- python/.github/workflows/build.yaml | 111 ------ python/.github/workflows/e2e.yaml | 24 -- python/.github/workflows/ruff.yaml | 23 -- python/.github/workflows/typos.yaml | 21 - 29 files changed, 896 insertions(+), 529 deletions(-) create mode 100644 .github/dependabot.yml rename e2e/.github/workflows/backport.yml => .github/workflows/backport.yaml (100%) rename controller/.github/workflows/build.yaml => .github/workflows/build-images.yaml (56%) rename python/.github/workflows/build_oci_bundle.yaml => .github/workflows/build-oci-bundle.yaml (73%) rename controller/.github/workflows/check-bundle.yaml => .github/workflows/controller-bundle.yaml (91%) rename controller/.github/workflows/pr-kind.yaml => .github/workflows/controller-kind.yaml (73%) rename controller/.github/workflows/test.yaml => .github/workflows/controller-tests.yaml (59%) rename {python/.github => .github}/workflows/documentation.yaml (92%) create mode 100644 .github/workflows/e2e.yaml create mode 100644 .github/workflows/lint.yaml rename python/.github/workflows/pr_analytics.yaml => .github/workflows/pr-analytics.yaml (59%) rename python/.github/workflows/pytest.yaml => .github/workflows/python-tests.yaml (87%) rename python/.github/workflows/trigger-packages-index.yaml => .github/workflows/trigger-packages.yaml (100%) delete mode 100644 controller/.github/workflows/backport.yaml delete mode 100644 controller/.github/workflows/e2e.yaml delete mode 100644 controller/.github/workflows/lint.yaml delete mode 100644 controller/typos.toml delete mode 100644 e2e/.github/workflows/selftest.yml delete 
mode 100644 e2e/action.yml create mode 100755 e2e/run-e2e.sh create mode 100755 e2e/setup-e2e.sh delete mode 100644 protocol/.github/workflows/lint.yaml delete mode 100644 python/.github/dependabot.yml delete mode 100644 python/.github/workflows/backport.yml delete mode 100644 python/.github/workflows/build.yaml delete mode 100644 python/.github/workflows/e2e.yaml delete mode 100644 python/.github/workflows/ruff.yaml delete mode 100644 python/.github/workflows/typos.yaml diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..e9ff035b --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,34 @@ +# Dependabot configuration for monorepo +# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates + +version: 2 +updates: + # Go modules for controller + - package-ecosystem: "gomod" + directory: "/controller" + schedule: + interval: weekly + + # Go modules for operator + - package-ecosystem: "gomod" + directory: "/controller/deploy/operator" + schedule: + interval: weekly + + # Python dependencies + - package-ecosystem: "pip" + directory: "/python" + schedule: + interval: weekly + + # Devcontainers + - package-ecosystem: "devcontainers" + directory: "/" + schedule: + interval: weekly + + # GitHub Actions + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: weekly diff --git a/e2e/.github/workflows/backport.yml b/.github/workflows/backport.yaml similarity index 100% rename from e2e/.github/workflows/backport.yml rename to .github/workflows/backport.yaml diff --git a/controller/.github/workflows/build.yaml b/.github/workflows/build-images.yaml similarity index 56% rename from controller/.github/workflows/build.yaml rename to .github/workflows/build-images.yaml index bf3509f8..7dd9b361 100644 --- a/controller/.github/workflows/build.yaml +++ b/.github/workflows/build-images.yaml @@ -1,4 +1,5 @@ -name: Build and push container image +name: Build and push 
container images + on: workflow_dispatch: push: @@ -7,8 +8,10 @@ on: branches: - main - 'release-*' + merge_group: env: + PUSH: ${{ github.repository_owner == 'jumpstarter-dev' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/') || startsWith(github.ref, 'refs/heads/release-')) }} REGISTRY: quay.io QUAY_ORG: quay.io/jumpstarter-dev @@ -17,18 +20,35 @@ jobs: runs-on: ubuntu-latest permissions: contents: read + packages: write + attestations: write + id-token: write strategy: matrix: include: + # Controller images - image_name: jumpstarter-dev/jumpstarter-controller - dockerfile: Dockerfile - context: . + dockerfile: controller/Dockerfile + context: controller - image_name: jumpstarter-dev/jumpstarter-operator - dockerfile: Dockerfile.operator - context: . + dockerfile: controller/Dockerfile.operator + context: controller - image_name: jumpstarter-dev/jumpstarter-operator-bundle - dockerfile: deploy/operator/bundle.Dockerfile - context: deploy/operator + dockerfile: controller/deploy/operator/bundle.Dockerfile + context: controller/deploy/operator + # Python images (use repo root context for .git access needed by hatch-vcs) + - image_name: jumpstarter-dev/jumpstarter + dockerfile: python/Dockerfile + context: . + - image_name: jumpstarter-dev/jumpstarter-utils + dockerfile: python/Dockerfile.utils + context: python + - image_name: jumpstarter-dev/jumpstarter-dev + dockerfile: python/.devfile/Containerfile + context: python + - image_name: jumpstarter-dev/jumpstarter-devspace + dockerfile: python/.devfile/Containerfile.client + context: . 
steps: - name: Checkout repository uses: actions/checkout@v4 @@ -41,6 +61,17 @@ jobs: VERSION=${VERSION#v} # remove the leading v prefix for version echo "VERSION=${VERSION}" >> $GITHUB_ENV echo "VERSION=${VERSION}" + + # Convert to PEP 440 compliant version for Python packages + # Format: 0.7.0-1051-g54cd2f08 -> 0.7.0.dev1051+g54cd2f08 + if [[ "$VERSION" =~ ^([0-9]+\.[0-9]+\.[0-9]+)-([0-9]+)-g([a-f0-9]+)$ ]]; then + PEP440_VERSION="${BASH_REMATCH[1]}.dev${BASH_REMATCH[2]}+g${BASH_REMATCH[3]}" + else + # If it's already a clean version (e.g., 0.7.0), use as-is + PEP440_VERSION="$VERSION" + fi + echo "PEP440_VERSION=${PEP440_VERSION}" >> $GITHUB_ENV + echo "PEP440_VERSION=${PEP440_VERSION}" - name: Set build args id: build-args @@ -53,6 +84,7 @@ jobs: echo "BUILD_DATE=${BUILD_DATE}" - name: Set image tags + if: ${{ env.PUSH == 'true' }} id: set-tags run: | TAGS="${{ env.REGISTRY }}/${{ matrix.image_name }}:${{ env.VERSION }}" @@ -61,7 +93,7 @@ jobs: TAGS="$TAGS,${{ env.REGISTRY }}/${{ matrix.image_name }}:latest" fi - if [[ "${{ github.ref }}" == "refs/heads/release-*" ]]; then + if [[ "${{ github.ref }}" == refs/heads/release-* ]]; then RELEASE_BRANCH_NAME=$(basename "${{ github.ref }}") TAGS="$TAGS,${{ env.REGISTRY }}/${{ matrix.image_name }}:${RELEASE_BRANCH_NAME}" fi @@ -76,29 +108,46 @@ jobs: - name: Log in to the Container registry uses: docker/login-action@v3 + if: ${{ env.PUSH == 'true' }} with: registry: ${{ env.REGISTRY }} username: jumpstarter-dev+jumpstarter_ci password: ${{ secrets.QUAY_TOKEN }} + - name: Extract metadata (tags, labels) for Docker + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.REGISTRY }}/${{ matrix.image_name }} + - name: Build and push Docker image id: push uses: docker/build-push-action@v6 with: context: ${{ matrix.context }} file: ${{ matrix.dockerfile }} - push: true + push: ${{ env.PUSH }} tags: ${{ steps.set-tags.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} platforms: linux/amd64,linux/arm64 
cache-from: type=gha cache-to: type=gha,mode=max build-args: | - GIT_VERSION=${{ env.VERSION }} + GIT_VERSION=${{ env.PEP440_VERSION }} GIT_COMMIT=${{ steps.build-args.outputs.git_commit }} BUILD_DATE=${{ steps.build-args.outputs.build_date }} - publish-helm-charts-containers: + - name: Generate artifact attestation + uses: actions/attest-build-provenance@v1 + if: ${{ env.PUSH == 'true' }} + with: + subject-name: ${{ env.REGISTRY }}/${{ matrix.image_name }} + subject-digest: ${{ steps.push.outputs.digest }} + push-to-registry: ${{ env.PUSH }} + + publish-helm-charts: needs: build-and-push-image + if: ${{ github.repository_owner == 'jumpstarter-dev' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/') || startsWith(github.ref, 'refs/heads/release-')) }} runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -116,8 +165,8 @@ jobs: run: | echo packaging ${VERSION} # patch the sub-chart app-version, because helm package won't do it - sed -i "s/^appVersion:.*/appVersion: $VERSION/" deploy/helm/jumpstarter/charts/jumpstarter-controller/Chart.yaml - helm package ./deploy/helm/jumpstarter --version "${VERSION}" --app-version "${VERSION}" + sed -i "s/^appVersion:.*/appVersion: $VERSION/" controller/deploy/helm/jumpstarter/charts/jumpstarter-controller/Chart.yaml + helm package ./controller/deploy/helm/jumpstarter --version "${VERSION}" --app-version "${VERSION}" - name: Login helm env: diff --git a/python/.github/workflows/build_oci_bundle.yaml b/.github/workflows/build-oci-bundle.yaml similarity index 73% rename from python/.github/workflows/build_oci_bundle.yaml rename to .github/workflows/build-oci-bundle.yaml index d06d14b7..f130f58a 100644 --- a/python/.github/workflows/build_oci_bundle.yaml +++ b/.github/workflows/build-oci-bundle.yaml @@ -1,4 +1,5 @@ name: Build and push buildroot-based flasher OCI bundle + on: workflow_dispatch: @@ -14,17 +15,17 @@ jobs: - name: Run build_fits.sh run: | - cd 
packages/jumpstarter-driver-flashers/oci_bundles/aarch64-itb + cd python/packages/jumpstarter-driver-flashers/oci_bundles/aarch64-itb ./build_fits.sh - name: Upload FIT artifacts uses: actions/upload-artifact@v4 with: name: FIT-images - path: packages/jumpstarter-driver-flashers/oci_bundles/aarch64-itb/data/*.itb + path: python/packages/jumpstarter-driver-flashers/oci_bundles/aarch64-itb/data/*.itb - name: Run build_bundle.sh for aarch64-itb run: | - cd packages/jumpstarter-driver-flashers/oci_bundles && dnf install -y oras + cd python/packages/jumpstarter-driver-flashers/oci_bundles && dnf install -y oras oras login quay.io -u jumpstarter-dev+jumpstarter_ci --password-stdin <<< "${{ secrets.QUAY_TOKEN }}" ./build_bundle.sh quay.io/jumpstarter-dev/jumpstarter-flasher-aarch64-itb:latest aarch64-itb diff --git a/controller/.github/workflows/check-bundle.yaml b/.github/workflows/controller-bundle.yaml similarity index 91% rename from controller/.github/workflows/check-bundle.yaml rename to .github/workflows/controller-bundle.yaml index b988051c..ab9dc5eb 100644 --- a/controller/.github/workflows/check-bundle.yaml +++ b/.github/workflows/controller-bundle.yaml @@ -1,9 +1,12 @@ name: Check Bundle + on: pull_request: branches: - main - 'release-*' + paths: + - 'controller/**' jobs: check-bundle: @@ -17,13 +20,13 @@ jobs: - name: Set up Go uses: actions/setup-go@v5 with: - go-version: 1.24 + go-version: '1.24' - name: Cache bin directory (deploy/operator) uses: actions/cache@v4 with: - path: deploy/operator/bin/ - key: ${{ runner.os }}-operator-bin-${{ hashFiles('deploy/operator/go.mod') }} + path: controller/deploy/operator/bin/ + key: ${{ runner.os }}-operator-bin-${{ hashFiles('controller/deploy/operator/go.mod') }} restore-keys: | ${{ runner.os }}-operator-bin- @@ -47,7 +50,7 @@ jobs: echo "TAG=${TAG}" - name: Run make bundle - working-directory: deploy/operator + working-directory: controller/deploy/operator run: | make bundle 
IMG="quay.io/jumpstarter-dev/jumpstarter-operator:${TAG}" @@ -78,7 +81,7 @@ jobs: git checkout -- . || true - name: Run make build-installer - working-directory: deploy/operator + working-directory: controller/deploy/operator run: | make build-installer @@ -92,4 +95,3 @@ jobs: else echo "No uncommitted changes detected. Installer files are up to date." fi - diff --git a/controller/.github/workflows/pr-kind.yaml b/.github/workflows/controller-kind.yaml similarity index 73% rename from controller/.github/workflows/pr-kind.yaml rename to .github/workflows/controller-kind.yaml index 722696f2..fa152b63 100644 --- a/controller/.github/workflows/pr-kind.yaml +++ b/.github/workflows/controller-kind.yaml @@ -1,10 +1,13 @@ name: Kind based CI + on: workflow_dispatch: pull_request: branches: - main - 'release-*' + paths: + - 'controller/**' jobs: deploy-kind: @@ -16,6 +19,7 @@ jobs: fetch-depth: 0 - name: Run make deploy + working-directory: controller run: make deploy e2e-test-operator: @@ -26,5 +30,6 @@ jobs: with: fetch-depth: 0 - - name: Run make deploy - run: make test-operator-e2e \ No newline at end of file + - name: Run operator e2e test + working-directory: controller + run: make test-operator-e2e diff --git a/controller/.github/workflows/test.yaml b/.github/workflows/controller-tests.yaml similarity index 59% rename from controller/.github/workflows/test.yaml rename to .github/workflows/controller-tests.yaml index 1e0ed26e..ca6a11a1 100644 --- a/controller/.github/workflows/test.yaml +++ b/.github/workflows/controller-tests.yaml @@ -1,10 +1,14 @@ -name: Unit/Functional tests +name: Controller Unit/Functional tests + on: workflow_dispatch: pull_request: branches: - main - 'release-*' + paths: + - 'controller/**' + - 'protocol/**' jobs: tests: @@ -16,15 +20,16 @@ jobs: fetch-depth: 0 - name: Run controller tests + working-directory: controller run: make test - name: Cache operator bin directory uses: actions/cache@v4 with: - path: deploy/operator/bin/ - key: ${{ 
runner.os }}-operator-bin-${{ hashFiles('deploy/operator/go.mod') }} + path: controller/deploy/operator/bin/ + key: ${{ runner.os }}-operator-bin-${{ hashFiles('controller/deploy/operator/go.mod') }} restore-keys: | ${{ runner.os }}-operator-bin- - name: Run operator tests - run: make -C deploy/operator test + run: make -C controller/deploy/operator test diff --git a/python/.github/workflows/documentation.yaml b/.github/workflows/documentation.yaml similarity index 92% rename from python/.github/workflows/documentation.yaml rename to .github/workflows/documentation.yaml index 75e9758d..bb7402d0 100644 --- a/python/.github/workflows/documentation.yaml +++ b/.github/workflows/documentation.yaml @@ -1,10 +1,16 @@ -name: documentation +name: Documentation on: # Runs on pushes targeting the default branch push: branches: ["main"] + paths: + - 'python/docs/**' + - 'python/packages/**' pull_request: + paths: + - 'python/docs/**' + - 'python/packages/**' merge_group: # Allows you to run this workflow manually from the Actions tab @@ -25,6 +31,7 @@ concurrency: defaults: run: shell: bash + working-directory: python jobs: # Build job @@ -66,7 +73,7 @@ jobs: - name: Upload artifact uses: actions/upload-pages-artifact@v3 with: - path: ./docs/build + path: ./python/docs/build check-warnings: runs-on: ubuntu-latest diff --git a/.github/workflows/e2e.yaml b/.github/workflows/e2e.yaml new file mode 100644 index 00000000..d13d6472 --- /dev/null +++ b/.github/workflows/e2e.yaml @@ -0,0 +1,44 @@ +name: End-to-end tests + +on: + workflow_dispatch: + pull_request: + branches: + - main + - 'release-*' + merge_group: + +permissions: + contents: read + +jobs: + e2e-tests: + if: github.repository_owner == 'jumpstarter-dev' + strategy: + matrix: + os: + - ubuntu-24.04 + - ubuntu-24.04-arm + runs-on: ${{ matrix.os }} + timeout-minutes: 60 + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Install uv + uses: astral-sh/setup-uv@v2 + + - name: Install Go + uses: 
actions/setup-go@v5 + with: + go-version: '1.22' + + - name: Setup e2e test environment + run: make e2e-setup + env: + CI: true + + - name: Run e2e tests + run: make e2e-run + env: + CI: true diff --git a/.github/workflows/lint.yaml b/.github/workflows/lint.yaml new file mode 100644 index 00000000..a08f98cb --- /dev/null +++ b/.github/workflows/lint.yaml @@ -0,0 +1,109 @@ +name: Linters + +on: + workflow_dispatch: + push: + branches: + - main + - 'release-*' + pull_request: + branches: + - main + - 'release-*' + merge_group: + +permissions: + contents: read + pull-requests: read + +jobs: + # Detect which paths changed to conditionally run linters + changes: + runs-on: ubuntu-latest + outputs: + controller: ${{ steps.filter.outputs.controller }} + helm: ${{ steps.filter.outputs.helm }} + protocol: ${{ steps.filter.outputs.protocol }} + python: ${{ steps.filter.outputs.python }} + steps: + - uses: actions/checkout@v4 + - uses: dorny/paths-filter@v3 + id: filter + with: + filters: | + controller: + - 'controller/**' + helm: + - 'controller/deploy/helm/**' + protocol: + - 'protocol/**' + python: + - 'python/**' + + lint-go: + needs: changes + if: needs.changes.outputs.controller == 'true' + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Set up Go + uses: actions/setup-go@v5 + with: + go-version: '1.24' + + - name: Run go linter + working-directory: controller + run: make lint + + lint-helm: + needs: changes + if: needs.changes.outputs.helm == 'true' + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Run helm linter + working-directory: controller + run: make lint-helm + + lint-protobuf: + needs: changes + if: needs.changes.outputs.protocol == 'true' + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} + + - name: Run 
protobuf linter + working-directory: protocol + run: make lint + + lint-python: + needs: changes + if: needs.changes.outputs.python == 'true' + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Run ruff + uses: astral-sh/ruff-action@84f83ecf9e1e15d26b7984c7ec9cf73d39ffc946 # v3.3.1 + with: + src: './python' + version-file: python/pyproject.toml + + typos: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Run typos + uses: crate-ci/typos@0f0ccba9ed1df83948f0c15026e4f5ccfce46109 # v1.32.0 + with: + config: ./typos.toml diff --git a/python/.github/workflows/pr_analytics.yaml b/.github/workflows/pr-analytics.yaml similarity index 59% rename from python/.github/workflows/pr_analytics.yaml rename to .github/workflows/pr-analytics.yaml index 60019b59..401337a1 100644 --- a/python/.github/workflows/pr_analytics.yaml +++ b/.github/workflows/pr-analytics.yaml @@ -1,4 +1,5 @@ -name: "PR Analytics" +name: PR Analytics + on: workflow_dispatch: inputs: @@ -6,22 +7,23 @@ on: description: "Report date start(d/MM/yyyy)" report_date_end: description: "Report date end(d/MM/yyyy)" + jobs: create-report: - name: "Create report" + name: Create report runs-on: ubuntu-latest permissions: contents: read pull-requests: read issues: write steps: - - name: "Run script for analytics" + - name: Run script for analytics uses: AlexSim93/pull-request-analytics-action@cc57ceb92148c5d5879ca578a2b59f99c3cbe231 # v4.6.1 with: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # In the case of a personal access token, it needs to be added to the repository's secrets and used in this field. 
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_REPO_FOR_ISSUE: jumpstarter - GITHUB_OWNER_FOR_ISSUE: jumpstarter-dev - GITHUB_OWNERS_REPOS: jumpstarter-dev/jumpstarter #TODO: check with more repos later, needs PAT: ,jumpstarter-dev/jumpstarter-controller + GITHUB_OWNER_FOR_ISSUE: jumpstarter-dev + GITHUB_OWNERS_REPOS: jumpstarter-dev/jumpstarter USE_CHARTS: true TIMEZONE: "Etc/UTC" REPORT_DATE_START: ${{ inputs.report_date_start }} diff --git a/python/.github/workflows/pytest.yaml b/.github/workflows/python-tests.yaml similarity index 87% rename from python/.github/workflows/pytest.yaml rename to .github/workflows/python-tests.yaml index 81b8ae52..045d5c78 100644 --- a/python/.github/workflows/pytest.yaml +++ b/.github/workflows/python-tests.yaml @@ -1,16 +1,24 @@ -name: "Run Tests" +name: Python Tests + on: workflow_dispatch: push: branches: - main - release-* + paths: + - 'python/**' + - 'protocol/**' pull_request: + paths: + - 'python/**' + - 'protocol/**' merge_group: permissions: contents: read pull-requests: read + jobs: pytest-matrix: runs-on: ${{ matrix.runs-on }} @@ -60,18 +68,19 @@ jobs: id: cache-fedora-cloud-images uses: actions/cache@v4 with: - path: packages/jumpstarter-driver-qemu/images + path: python/packages/jumpstarter-driver-qemu/images key: fedora-cloud-41-1.4 - name: Download Fedora Cloud images if: steps.cache-fedora-cloud-images.outputs.cache-hit != 'true' run: | for arch in aarch64 x86_64; do - curl -L --output "packages/jumpstarter-driver-qemu/images/Fedora-Cloud-Base-Generic-41-1.4.${arch}.qcow2" \ + curl -L --output "python/packages/jumpstarter-driver-qemu/images/Fedora-Cloud-Base-Generic-41-1.4.${arch}.qcow2" \ "https://download.fedoraproject.org/pub/fedora/linux/releases/41/Cloud/${arch}/images/Fedora-Cloud-Base-Generic-41-1.4.${arch}.qcow2" done - name: Run pytest + working-directory: python run: | make test diff --git a/python/.github/workflows/trigger-packages-index.yaml b/.github/workflows/trigger-packages.yaml similarity index 
100% rename from python/.github/workflows/trigger-packages-index.yaml rename to .github/workflows/trigger-packages.yaml diff --git a/controller/.github/workflows/backport.yaml b/controller/.github/workflows/backport.yaml deleted file mode 100644 index bb11015b..00000000 --- a/controller/.github/workflows/backport.yaml +++ /dev/null @@ -1,42 +0,0 @@ -# WARNING: -# When extending this action, be aware that $GITHUB_TOKEN allows write access to -# the GitHub repository. This means that it should not evaluate user input in a -# way that allows code injection. - -name: Backport - -on: - pull_request_target: - types: [closed, labeled] - -permissions: {} - -jobs: - backport: - name: Backport Pull Request - if: github.repository_owner == 'jumpstarter-dev' && github.event.pull_request.merged == true && (github.event_name != 'labeled' || startsWith('backport', github.event.label.name)) - runs-on: ubuntu-24.04 - steps: - # Use a GitHub App to create the PR so that CI gets triggered - # The App is scoped to Repository > Contents and Pull Requests: write for jumpstarter-dev - - uses: actions/create-github-app-token@3ff1caaa28b64c9cc276ce0a02e2ff584f3900c5 # v2.0.2 - id: app-token - with: - app-id: ${{ secrets.JUMPSTARTER_BACKPORT_BOT_APP_ID }} - private-key: ${{ secrets.JUMPSTARTER_BACKPORT_BOT_PRIVATE_KEY }} - - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - ref: ${{ github.event.pull_request.head.sha }} - token: ${{ steps.app-token.outputs.token }} - - - name: Create backport PRs - uses: korthout/backport-action@436145e922f9561fc5ea157ff406f21af2d6b363 # v3.2.0 - with: - # Config README: https://github.com/korthout/backport-action#backport-action - github_token: ${{ steps.app-token.outputs.token }} - conflict_resolution: draft_commit_conflicts - merge_commits: skip - pull_description: |- - Bot-based backport to `${target_branch}`, triggered by a label in #${pull_number}. 
- diff --git a/controller/.github/workflows/e2e.yaml b/controller/.github/workflows/e2e.yaml deleted file mode 100644 index f8fbb5a8..00000000 --- a/controller/.github/workflows/e2e.yaml +++ /dev/null @@ -1,28 +0,0 @@ -name: End-to-end tests -on: - workflow_dispatch: - pull_request: - branches: - - main - - 'release-*' -jobs: - e2e-tests: - strategy: - matrix: - os: - - ubuntu-24.04 - - ubuntu-24.04-arm - runs-on: ${{ matrix.os }} - steps: - - uses: jumpstarter-dev/jumpstarter-e2e@main - with: - controller-ref: ${{ github.ref }} - # use the matching branch on the jumpstarter repo - jumpstarter-ref: ${{ github.event.pull_request.base.ref }} - e2e-tests-release-0-7: - runs-on: ubuntu-latest - steps: - - uses: jumpstarter-dev/jumpstarter-e2e@release-0.7 - with: - controller-ref: ${{ github.ref }} - jumpstarter-ref: release-0.7 diff --git a/controller/.github/workflows/lint.yaml b/controller/.github/workflows/lint.yaml deleted file mode 100644 index a0730189..00000000 --- a/controller/.github/workflows/lint.yaml +++ /dev/null @@ -1,39 +0,0 @@ -name: Linters -on: - workflow_dispatch: - push: - branches: - - main - - 'release-*' - pull_request: - branches: - - main - - 'release-*' - -jobs: - lint-helm: - runs-on: ubuntu-latest - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - - name: Run helm linter - run: make lint-helm - - lint-go: - runs-on: ubuntu-latest - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - - name: Set up Go - uses: actions/setup-go@v2 - with: - go-version: 1.24 - - - name: Run go linter - run: make lint diff --git a/controller/typos.toml b/controller/typos.toml deleted file mode 100644 index e8741358..00000000 --- a/controller/typos.toml +++ /dev/null @@ -1,6 +0,0 @@ -[default.extend-words] -Ded = "Ded" # from ANDed - -[type.gomod] -extend-glob = ["go.mod", "go.sum"] -check-file = false diff --git a/e2e/.github/workflows/selftest.yml 
b/e2e/.github/workflows/selftest.yml deleted file mode 100644 index 086e3772..00000000 --- a/e2e/.github/workflows/selftest.yml +++ /dev/null @@ -1,19 +0,0 @@ -name: selftest -on: - - push -jobs: - test: - strategy: - matrix: - os: - - ubuntu-24.04 - - ubuntu-24.04-arm - runs-on: ${{ matrix.os }} - steps: - - uses: actions/checkout@v4 - with: - path: e2e - - uses: ./e2e - with: - controller-ref: main - jumpstarter-ref: main diff --git a/e2e/action.yml b/e2e/action.yml deleted file mode 100644 index ab69a87c..00000000 --- a/e2e/action.yml +++ /dev/null @@ -1,99 +0,0 @@ -name: 'Jumpstarter end-to-end testing' -inputs: - controller-ref: - description: 'jumpstarter-dev/jumpstarter-controller git ref' - required: true - jumpstarter-ref: - description: 'jumpstarter-dev/jumpstarter git ref' - required: true -runs: - using: "composite" - steps: - - name: Install uv - uses: astral-sh/setup-uv@v2 - - name: Install python - shell: bash - run: | - uv python install 3.12 - - name: Install bats - shell: bash - run: | - sudo apt-get update - sudo apt-get install -y bats bats-support bats-assert - - name: Checkout jumpstarter controller - uses: actions/checkout@v4 - with: - repository: jumpstarter-dev/jumpstarter-controller - ref: ${{ inputs.controller-ref }} - path: controller - - name: Checkout jumpstarter - uses: actions/checkout@v4 - with: - repository: jumpstarter-dev/jumpstarter - ref: ${{ inputs.jumpstarter-ref }} - path: jumpstarter - - name: Deploy dex - shell: bash - run: | - go run github.com/cloudflare/cfssl/cmd/cfssl@latest gencert -initca "$GITHUB_ACTION_PATH"/ca-csr.json | \ - go run github.com/cloudflare/cfssl/cmd/cfssljson@latest -bare ca - - go run github.com/cloudflare/cfssl/cmd/cfssl@latest gencert -ca=ca.pem -ca-key=ca-key.pem \ - -config="$GITHUB_ACTION_PATH"/ca-config.json -profile=www "$GITHUB_ACTION_PATH"/dex-csr.json | \ - go run github.com/cloudflare/cfssl/cmd/cfssljson@latest -bare server - - cp "$GITHUB_ACTION_PATH"/kind_cluster.yaml 
./controller/hack/kind_cluster.yaml - make -C controller cluster - - kubectl create namespace dex - kubectl -n dex create secret tls dex-tls \ - --cert=server.pem \ - --key=server-key.pem - - go run github.com/mikefarah/yq/v4@latest -i \ - '.jumpstarter-controller.config.authentication.jwt[0].issuer.certificateAuthority = load_str("ca.pem")' \ - "$GITHUB_ACTION_PATH"/values.kind.yaml - - # important! - kubectl create clusterrolebinding oidc-reviewer \ - --clusterrole=system:service-account-issuer-discovery \ - --group=system:unauthenticated - - helm repo add dex https://charts.dexidp.io - helm install --namespace dex --wait -f "$GITHUB_ACTION_PATH"/dex.values.yaml dex dex/dex - - sudo cp ca.pem /usr/local/share/ca-certificates/dex.crt - sudo update-ca-certificates - - echo "127.0.0.1 dex.dex.svc.cluster.local" | sudo tee -a /etc/hosts - - name: Deploy jumpstarter controller - shell: bash - run: | - cp "$GITHUB_ACTION_PATH"/values.kind.yaml ./controller/deploy/helm/jumpstarter/values.kind.yaml - make -C controller deploy - - name: Install jumpstarter - shell: bash - run: | - uv venv - uv pip install \ - ./jumpstarter/packages/jumpstarter-cli \ - ./jumpstarter/packages/jumpstarter-driver-composite \ - ./jumpstarter/packages/jumpstarter-driver-power \ - ./jumpstarter/packages/jumpstarter-driver-opendal - - name: Run jumpstarter - shell: bash - run: | - export ENDPOINT=$(helm get values jumpstarter --output json | jq -r '."jumpstarter-controller".grpc.endpoint') - - sudo mkdir -p /etc/jumpstarter/exporters - sudo chown $USER /etc/jumpstarter/exporters - - export JS_NAMESPACE="jumpstarter-lab" - - . 
.venv/bin/activate - - export JUMPSTARTER_GRPC_INSECURE=1 - - kubectl create -n "${JS_NAMESPACE}" sa test-client-sa - kubectl create -n "${JS_NAMESPACE}" sa test-exporter-sa - - bats --show-output-of-passing-tests --verbose-run "$GITHUB_ACTION_PATH"/tests.bats diff --git a/e2e/run-e2e.sh b/e2e/run-e2e.sh new file mode 100755 index 00000000..203fad28 --- /dev/null +++ b/e2e/run-e2e.sh @@ -0,0 +1,170 @@ +#!/usr/bin/env bash +# Jumpstarter End-to-End Test Runner +# This script runs the e2e test suite (assumes setup-e2e.sh was run first) + +set -euo pipefail + +# Get the directory where this script is located +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" + +# Get the monorepo root (parent of e2e directory) +REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)" + +# Color output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +NC='\033[0m' # No Color + +log_info() { + echo -e "${GREEN}[INFO]${NC} $*" +} + +log_warn() { + echo -e "${YELLOW}[WARN]${NC} $*" +} + +log_error() { + echo -e "${RED}[ERROR]${NC} $*" +} + +# Check if running in CI +is_ci() { + [ -n "${CI:-}" ] || [ -n "${GITHUB_ACTIONS:-}" ] +} + +# Check if setup was completed +check_setup() { + if [ ! -f "$REPO_ROOT/.e2e-setup-complete" ]; then + log_error "Setup not complete! Please run setup-e2e.sh first:" + log_error " bash e2e/setup-e2e.sh" + log_error "" + log_error "Or in CI mode, run the full setup automatically" + return 1 + fi + + # Load setup configuration + source "$REPO_ROOT/.e2e-setup-complete" + + # Export SSL certificate paths for Python + export SSL_CERT_FILE + export REQUESTS_CA_BUNDLE + + # Verify critical components are still running + if ! kubectl get namespace "$JS_NAMESPACE" &> /dev/null; then + log_error "Namespace $JS_NAMESPACE not found. Please run setup-e2e.sh again." 
+ return 1 + fi + + log_info "โœ“ Setup verified" + return 0 +} + +# Setup environment for bats +setup_bats_env() { + # Always set BATS_LIB_PATH to include local libraries + local LOCAL_BATS_LIB="$REPO_ROOT/.bats/lib" + + if [ -d "$LOCAL_BATS_LIB" ]; then + export BATS_LIB_PATH="$LOCAL_BATS_LIB:${BATS_LIB_PATH:-}" + log_info "Set BATS_LIB_PATH to local libraries: $BATS_LIB_PATH" + else + log_warn "Local bats libraries not found at $LOCAL_BATS_LIB" + log_warn "You may need to run setup-e2e.sh first" + fi +} + +# Run the tests +run_tests() { + log_info "Running jumpstarter e2e tests..." + + cd "$REPO_ROOT" + + # Activate virtual environment + if [ -f .venv/bin/activate ]; then + source .venv/bin/activate + else + log_error "Virtual environment not found. Please run setup-e2e.sh first." + exit 1 + fi + + # Use insecure GRPC for testing + export JUMPSTARTER_GRPC_INSECURE=1 + + # Export variables for bats + export JS_NAMESPACE="${JS_NAMESPACE}" + export ENDPOINT="${ENDPOINT}" + + # Setup bats environment + setup_bats_env + + # Run bats tests + log_info "Running bats tests..." + bats --show-output-of-passing-tests --verbose-run "$SCRIPT_DIR"/tests.bats +} + +# Full setup and run (for CI or first-time use) +full_run() { + log_info "Running full setup + test cycle..." + + if [ -f "$SCRIPT_DIR/setup-e2e.sh" ]; then + bash "$SCRIPT_DIR/setup-e2e.sh" + else + log_error "setup-e2e.sh not found!" 
+ exit 1 + fi + + # After setup, load the configuration + if [ -f "$REPO_ROOT/.e2e-setup-complete" ]; then + source "$REPO_ROOT/.e2e-setup-complete" + # Export SSL certificate paths for Python + export SSL_CERT_FILE + export REQUESTS_CA_BUNDLE + fi + + run_tests +} + +# Main execution +main() { + # Default namespace + export JS_NAMESPACE="${JS_NAMESPACE:-jumpstarter-lab}" + + log_info "=== Jumpstarter E2E Test Runner ===" + log_info "Namespace: $JS_NAMESPACE" + log_info "Repository Root: $REPO_ROOT" + echo "" + + # If --full flag is passed, always run full setup + if [[ "${1:-}" == "--full" ]]; then + full_run + # In CI mode, check if setup was already done + elif is_ci; then + if check_setup 2>/dev/null; then + log_info "Setup already complete, skipping setup and running tests..." + run_tests + else + log_info "Setup not found in CI, running full setup..." + full_run + fi + else + # Local development: require setup to be done first + if check_setup; then + run_tests + else + log_error "" + log_error "Setup is required before running tests." + log_error "" + log_error "Options:" + log_error " 1. Run setup first: bash e2e/setup-e2e.sh" + log_error " 2. Run full cycle: bash e2e/run-e2e.sh --full" + exit 1 + fi + fi + + echo "" + log_info "โœ“โœ“โœ“ All e2e tests completed successfully! โœ“โœ“โœ“" +} + +# Run main function +main "$@" diff --git a/e2e/setup-e2e.sh b/e2e/setup-e2e.sh new file mode 100755 index 00000000..96a4c7e6 --- /dev/null +++ b/e2e/setup-e2e.sh @@ -0,0 +1,364 @@ +#!/usr/bin/env bash +# Jumpstarter End-to-End Testing Setup Script +# This script performs one-time setup for e2e testing + +set -euo pipefail + +# Get the directory where this script is located +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" + +# Get the monorepo root (parent of e2e directory) +REPO_ROOT="$(cd "$SCRIPT_DIR/.." 
&& pwd)" + +# Default namespace for tests +export JS_NAMESPACE="${JS_NAMESPACE:-jumpstarter-lab}" + +# Color output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +NC='\033[0m' # No Color + +log_info() { + echo -e "${GREEN}[INFO]${NC} $*" +} + +log_warn() { + echo -e "${YELLOW}[WARN]${NC} $*" +} + +log_error() { + echo -e "${RED}[ERROR]${NC} $*" +} + +# Check if running in CI +is_ci() { + [ -n "${CI:-}" ] || [ -n "${GITHUB_ACTIONS:-}" ] +} + +# Check if bats libraries are available +check_bats_libraries() { + if ! command -v bats &> /dev/null; then + return 1 + fi + + # Try to load the libraries + if ! bats --version &> /dev/null; then + return 1 + fi + + # Check if libraries can be loaded by testing with a simple script + local test_file=$(mktemp) + cat > "$test_file" <<'EOF' +setup() { + bats_load_library bats-support + bats_load_library bats-assert +} + +@test "dummy" { + run echo "test" + assert_success +} +EOF + + # Run test with current BATS_LIB_PATH + if bats "$test_file" &> /dev/null; then + rm -f "$test_file" + return 0 + else + rm -f "$test_file" + return 1 + fi +} + +# Install bats libraries locally (works on all systems) +install_bats_libraries_local() { + local LIB_DIR="$REPO_ROOT/.bats/lib" + local ORIGINAL_DIR="$PWD" + + log_info "Installing bats helper libraries to $LIB_DIR..." + + mkdir -p "$LIB_DIR" + cd "$LIB_DIR" + + # Install bats-support + if [ ! -d "bats-support" ]; then + log_info "Cloning bats-support..." + git clone --depth 1 https://github.com/bats-core/bats-support.git + else + log_info "bats-support already installed" + fi + + # Install bats-assert + if [ ! -d "bats-assert" ]; then + log_info "Cloning bats-assert..." + git clone --depth 1 https://github.com/bats-core/bats-assert.git + else + log_info "bats-assert already installed" + fi + + # Install bats-file + if [ ! -d "bats-file" ]; then + log_info "Cloning bats-file..." 
+ git clone --depth 1 https://github.com/bats-core/bats-file.git + else + log_info "bats-file already installed" + fi + + cd "$ORIGINAL_DIR" + + # Set BATS_LIB_PATH + export BATS_LIB_PATH="$LIB_DIR:${BATS_LIB_PATH:-}" + + log_info "โœ“ Bats libraries installed successfully" + log_info "BATS_LIB_PATH set to: $BATS_LIB_PATH" + + # Verify installation worked + if check_bats_libraries; then + log_info "โœ“ Libraries verified and working" + else + log_error "Libraries installed but verification failed" + log_error "Please check that the following directories exist:" + log_error " $LIB_DIR/bats-support" + log_error " $LIB_DIR/bats-assert" + exit 1 + fi +} + +# Step 1: Install dependencies +install_dependencies() { + log_info "Installing dependencies..." + + # Install uv if not already installed + if ! command -v uv &> /dev/null; then + log_info "Installing uv..." + curl -LsSf https://astral.sh/uv/install.sh | sh + export PATH="$HOME/.cargo/bin:$PATH" + fi + + # Install Python 3.12 + log_info "Installing Python 3.12..." + uv python install 3.12 + + # Install bats if not already installed + if ! command -v bats &> /dev/null; then + log_info "Installing bats..." + if is_ci; then + sudo apt-get update + sudo apt-get install -y bats + elif [[ "$OSTYPE" == "darwin"* ]]; then + log_info "Installing bats-core via Homebrew..." + brew install bats-core + else + log_error "bats not found. Please install it manually:" + log_error " Ubuntu/Debian: sudo apt-get install bats" + log_error " Fedora/RHEL: sudo dnf install bats" + log_error " macOS: brew install bats-core" + exit 1 + fi + fi + + # Always install bats libraries locally for consistency across all systems + # This ensures libraries work regardless of package manager or distribution + if ! check_bats_libraries; then + log_info "Installing bats libraries locally..." 
+ install_bats_libraries_local + else + log_info "โœ“ Bats libraries are already available" + # Still set BATS_LIB_PATH to include local directory for consistency + export BATS_LIB_PATH="$REPO_ROOT/.bats/lib:${BATS_LIB_PATH:-}" + fi + + log_info "โœ“ Dependencies installed" +} + +# Step 2: Deploy dex +deploy_dex() { + log_info "Deploying dex..." + + cd "$REPO_ROOT" + + # Generate certificates + log_info "Generating certificates..." + go run github.com/cloudflare/cfssl/cmd/cfssl@latest gencert -initca "$SCRIPT_DIR"/ca-csr.json | \ + go run github.com/cloudflare/cfssl/cmd/cfssljson@latest -bare ca - + go run github.com/cloudflare/cfssl/cmd/cfssl@latest gencert -ca=ca.pem -ca-key=ca-key.pem \ + -config="$SCRIPT_DIR"/ca-config.json -profile=www "$SCRIPT_DIR"/dex-csr.json | \ + go run github.com/cloudflare/cfssl/cmd/cfssljson@latest -bare server + + + make -C controller cluster + + # Create dex namespace and TLS secret + log_info "Creating dex namespace and secrets..." + kubectl create namespace dex + kubectl -n dex create secret tls dex-tls \ + --cert=server.pem \ + --key=server-key.pem + + # Create .e2e directory for configuration files + log_info "Creating .e2e directory for local configuration..." + mkdir -p "$REPO_ROOT/.e2e" + + # Copy values.kind.yaml to .e2e and inject the CA certificate + log_info "Creating values file with CA certificate..." + cp "$SCRIPT_DIR"/values.kind.yaml "$REPO_ROOT/.e2e/values.kind.yaml" + + log_info "Injecting CA certificate into values..." + go run github.com/mikefarah/yq/v4@latest -i \ + '.jumpstarter-controller.config.authentication.jwt[0].issuer.certificateAuthority = load_str("ca.pem")' \ + "$REPO_ROOT/.e2e/values.kind.yaml" + + log_info "โœ“ Values file with CA certificate created at .e2e/values.kind.yaml" + + # Create OIDC reviewer binding (important!) + log_info "Creating OIDC reviewer cluster role binding..." 
+ kubectl create clusterrolebinding oidc-reviewer \ + --clusterrole=system:service-account-issuer-discovery \ + --group=system:unauthenticated + + # Install dex via helm + log_info "Installing dex via helm..." + helm repo add dex https://charts.dexidp.io + helm install --namespace dex --wait -f "$SCRIPT_DIR"/dex.values.yaml dex dex/dex + + # Install CA certificate + log_info "Installing CA certificate..." + if [[ "$OSTYPE" == "darwin"* ]]; then + # this may be unnecessary, but keeping it here for now + #log_warn "About to add the CA certificate to your macOS login keychain" + #security add-trusted-cert -d -r trustRoot -k ~/Library/Keychains/login.keychain-db ca.pem + #log_info "โœ“ CA certificate added to macOS login keychain" + true + else + log_warn "About to install the CA certificate system-wide (requires sudo)" + # Detect if this is a RHEL/Fedora system or Debian/Ubuntu system + if [ -d "/etc/pki/ca-trust/source/anchors" ]; then + # RHEL/Fedora/CentOS + sudo cp ca.pem /etc/pki/ca-trust/source/anchors/dex.crt + sudo update-ca-trust + log_info "โœ“ CA certificate installed system-wide (RHEL/Fedora)" + else + # Debian/Ubuntu + sudo cp ca.pem /usr/local/share/ca-certificates/dex.crt + sudo update-ca-certificates + log_info "โœ“ CA certificate installed system-wide (Debian/Ubuntu)" + fi + fi + + # Add dex to /etc/hosts if not already present + log_info "Checking /etc/hosts for dex entry..." + if ! grep -q "dex.dex.svc.cluster.local" /etc/hosts 2>/dev/null; then + log_warn "About to add 'dex.dex.svc.cluster.local' to /etc/hosts (requires sudo)" + echo "127.0.0.1 dex.dex.svc.cluster.local" | sudo tee -a /etc/hosts + log_info "โœ“ Added dex to /etc/hosts" + else + log_info "โœ“ dex.dex.svc.cluster.local already in /etc/hosts" + fi + + log_info "โœ“ Dex deployed" +} + +# Step 3: Deploy jumpstarter controller +deploy_controller() { + log_info "Deploying jumpstarter controller..." 
+ + cd "$REPO_ROOT" + + # Deploy with modified values using EXTRA_VALUES environment variable + log_info "Deploying controller with CA certificate..." + EXTRA_VALUES="--values $REPO_ROOT/.e2e/values.kind.yaml" make -C controller deploy + + log_info "โœ“ Controller deployed" +} + +# Step 4: Install jumpstarter +install_jumpstarter() { + log_info "Installing jumpstarter..." + + cd "$REPO_ROOT" + + # Create virtual environment + uv venv + + # Install jumpstarter packages + uv pip install \ + ./python/packages/jumpstarter-cli \ + ./python/packages/jumpstarter-driver-composite \ + ./python/packages/jumpstarter-driver-power \ + ./python/packages/jumpstarter-driver-opendal + + log_info "โœ“ Jumpstarter installed" +} + +# Step 5: Setup test environment +setup_test_environment() { + log_info "Setting up test environment..." + + cd "$REPO_ROOT" + + # Get the controller endpoint + export ENDPOINT=$(helm get values jumpstarter --output json | jq -r '."jumpstarter-controller".grpc.endpoint') + log_info "Controller endpoint: $ENDPOINT" + + # Setup exporters directory + echo "Setting up exporters directory in /etc/jumpstarter/exporters..., will need permissions" + sudo mkdir -p /etc/jumpstarter/exporters + sudo chown "$USER" /etc/jumpstarter/exporters + + # Create service accounts + log_info "Creating service accounts..." 
+ kubectl create -n "${JS_NAMESPACE}" sa test-client-sa + kubectl create -n "${JS_NAMESPACE}" sa test-exporter-sa + + # Create a marker file to indicate setup is complete + echo "ENDPOINT=$ENDPOINT" > "$REPO_ROOT/.e2e-setup-complete" + echo "JS_NAMESPACE=$JS_NAMESPACE" >> "$REPO_ROOT/.e2e-setup-complete" + echo "REPO_ROOT=$REPO_ROOT" >> "$REPO_ROOT/.e2e-setup-complete" + echo "SCRIPT_DIR=$SCRIPT_DIR" >> "$REPO_ROOT/.e2e-setup-complete" + + # Set SSL certificate paths for Python to use the generated CA + echo "SSL_CERT_FILE=$REPO_ROOT/ca.pem" >> "$REPO_ROOT/.e2e-setup-complete" + echo "REQUESTS_CA_BUNDLE=$REPO_ROOT/ca.pem" >> "$REPO_ROOT/.e2e-setup-complete" + + # Save BATS_LIB_PATH for test runs + echo "BATS_LIB_PATH=$BATS_LIB_PATH" >> "$REPO_ROOT/.e2e-setup-complete" + + log_info "โœ“ Test environment ready" +} + +# Main execution +main() { + log_info "=== Jumpstarter E2E Setup ===" + log_info "Namespace: $JS_NAMESPACE" + log_info "Repository Root: $REPO_ROOT" + log_info "Script Directory: $SCRIPT_DIR" + echo "" + + install_dependencies + echo "" + + deploy_dex + echo "" + + deploy_controller + echo "" + + install_jumpstarter + echo "" + + setup_test_environment + echo "" + + log_info "โœ“โœ“โœ“ Setup complete! 
โœ“โœ“โœ“" + log_info "" + log_info "To run tests:" + log_info " cd $REPO_ROOT" + log_info " bash e2e/run-e2e.sh" + log_info "" + log_info "Or use the Makefile:" + log_info " make e2e" +} + +# Run main function +main "$@" diff --git a/e2e/tests.bats b/e2e/tests.bats index e84f0630..703e15fc 100644 --- a/e2e/tests.bats +++ b/e2e/tests.bats @@ -1,5 +1,13 @@ JS_NAMESPACE="${JS_NAMESPACE:-jumpstarter-lab}" +# File to track bash wrapper process PIDs across tests +EXPORTER_PIDS_FILE="${BATS_RUN_TMPDIR:-/tmp}/exporter_pids.txt" + +setup_file() { + # Initialize the PIDs file at the start of all tests + echo "" > "$EXPORTER_PIDS_FILE" +} + setup() { bats_load_library bats-support bats_load_library bats-assert @@ -7,6 +15,47 @@ setup() { bats_require_minimum_version 1.5.0 } +# teardown_file runs once after all tests complete (requires bats-core 1.5.0+) +teardown_file() { + echo "" >&2 + echo "========================================" >&2 + echo "TEARDOWN_FILE RUNNING" >&2 + echo "========================================" >&2 + echo "=== Cleaning up exporter bash processes ===" >&2 + + # Read PIDs from file + if [ -f "$EXPORTER_PIDS_FILE" ]; then + local pids=$(cat "$EXPORTER_PIDS_FILE" | tr '\n' ' ') + echo "Tracked PIDs from file: $pids" >&2 + + while IFS= read -r pid; do + if [ -n "$pid" ]; then + echo "Checking PID $pid..." >&2 + if ps -p "$pid" > /dev/null 2>&1; then + echo " Killing PID $pid" >&2 + kill -9 "$pid" 2>/dev/null || true + else + echo " PID $pid already terminated" >&2 + fi + fi + done < "$EXPORTER_PIDS_FILE" + else + echo "No PIDs file found at $EXPORTER_PIDS_FILE" >&2 + fi + + echo "Checking for orphaned jmp processes..." 
>&2 + local orphans=$(pgrep -f "jmp run --exporter" 2>/dev/null | wc -l) + echo "Found $orphans orphaned jmp processes" >&2 + + # remove orphaned processes + pkill -9 -f "jmp run --exporter" 2>/dev/null || true + + # Clean up the PIDs file + rm -f "$EXPORTER_PIDS_FILE" + + echo "=== Cleanup complete ===" >&2 +} + wait_for_exporter() { # After a lease operation the exporter is disconnecting from controller and reconnecting. # The disconnect can take a short while so let's avoid catching the pre-disconnect state and early return @@ -69,11 +118,11 @@ wait_for_exporter() { --connector-id kubernetes \ --token $(kubectl create -n "${JS_NAMESPACE}" token test-exporter-sa) - go run github.com/mikefarah/yq/v4@latest -i ". * load(\"$GITHUB_ACTION_PATH/exporter.yaml\")" \ + go run github.com/mikefarah/yq/v4@latest -i ". * load(\"e2e/exporter.yaml\")" \ /etc/jumpstarter/exporters/test-exporter-oidc.yaml - go run github.com/mikefarah/yq/v4@latest -i ". * load(\"$GITHUB_ACTION_PATH/exporter.yaml\")" \ + go run github.com/mikefarah/yq/v4@latest -i ". * load(\"e2e/exporter.yaml\")" \ /etc/jumpstarter/exporters/test-exporter-sa.yaml - go run github.com/mikefarah/yq/v4@latest -i ". * load(\"$GITHUB_ACTION_PATH/exporter.yaml\")" \ + go run github.com/mikefarah/yq/v4@latest -i ". * load(\"e2e/exporter.yaml\")" \ /etc/jumpstarter/exporters/test-exporter-legacy.yaml jmp config client list @@ -86,19 +135,21 @@ while true; do jmp run --exporter test-exporter-oidc done EOF + echo "$!" >> "$EXPORTER_PIDS_FILE" cat <&- & while true; do jmp run --exporter test-exporter-sa done EOF + echo "$!" >> "$EXPORTER_PIDS_FILE" cat <&- & while true; do jmp run --exporter test-exporter-legacy done EOF - + echo "$!" 
>> "$EXPORTER_PIDS_FILE" wait_for_exporter } @@ -110,7 +161,7 @@ EOF # to verify that the client can operate without a config file JMP_NAMESPACE="${JS_NAMESPACE}" \ JMP_DRIVERS_ALLOW="*" \ - JMP_NAME=test-exporter-legacy \ + JMP_NAME=test-client-legacy \ JMP_ENDPOINT=$(kubectl get clients.jumpstarter.dev -n "${JS_NAMESPACE}" test-client-legacy -o 'jsonpath={.status.endpoint}') \ JMP_TOKEN=$(kubectl get secrets -n "${JS_NAMESPACE}" test-client-legacy-client -o 'jsonpath={.data.token}' | base64 -d) \ jmp shell --selector example.com/board=oidc j power on diff --git a/protocol/.github/workflows/lint.yaml b/protocol/.github/workflows/lint.yaml deleted file mode 100644 index 2492ddec..00000000 --- a/protocol/.github/workflows/lint.yaml +++ /dev/null @@ -1,20 +0,0 @@ -name: "Code Quality" -on: - workflow_dispatch: - push: - branches: - - main - pull_request: - -permissions: - contents: read - pull-requests: read -jobs: - lint: - runs-on: "ubuntu-latest" - steps: - - uses: actions/checkout@v4 - with: - ref: ${{ github.event.pull_request.head.sha }} - - name: Running Linter - run: make lint diff --git a/python/.github/dependabot.yml b/python/.github/dependabot.yml deleted file mode 100644 index f33a02cd..00000000 --- a/python/.github/dependabot.yml +++ /dev/null @@ -1,12 +0,0 @@ -# To get started with Dependabot version updates, you'll need to specify which -# package ecosystems to update and where the package manifests are located. 
-# Please see the documentation for more information: -# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates -# https://containers.dev/guide/dependabot - -version: 2 -updates: - - package-ecosystem: "devcontainers" - directory: "/" - schedule: - interval: weekly diff --git a/python/.github/workflows/backport.yml b/python/.github/workflows/backport.yml deleted file mode 100644 index 1c423801..00000000 --- a/python/.github/workflows/backport.yml +++ /dev/null @@ -1,41 +0,0 @@ -# WARNING: -# When extending this action, be aware that $GITHUB_TOKEN allows write access to -# the GitHub repository. This means that it should not evaluate user input in a -# way that allows code injection. - -name: Backport - -on: - pull_request_target: - types: [closed, labeled] - -permissions: {} - -jobs: - backport: - name: Backport Pull Request - if: github.repository_owner == 'jumpstarter-dev' && github.event.pull_request.merged == true && (github.event_name != 'labeled' || startsWith('backport', github.event.label.name)) - runs-on: ubuntu-24.04 - steps: - # Use a GitHub App to create the PR so that CI gets triggered - # The App is scoped to Repository > Contents and Pull Requests: write for jumpstarter-dev - - uses: actions/create-github-app-token@3ff1caaa28b64c9cc276ce0a02e2ff584f3900c5 # v2.0.2 - id: app-token - with: - app-id: ${{ secrets.JUMPSTARTER_BACKPORT_BOT_APP_ID }} - private-key: ${{ secrets.JUMPSTARTER_BACKPORT_BOT_PRIVATE_KEY }} - - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - ref: ${{ github.event.pull_request.head.sha }} - token: ${{ steps.app-token.outputs.token }} - - - name: Create backport PRs - uses: korthout/backport-action@436145e922f9561fc5ea157ff406f21af2d6b363 # v3.2.0 - with: - # Config README: https://github.com/korthout/backport-action#backport-action - github_token: ${{ steps.app-token.outputs.token }} - conflict_resolution: draft_commit_conflicts - merge_commits: 
skip - pull_description: |- - Bot-based backport to `${target_branch}`, triggered by a label in #${pull_number}. diff --git a/python/.github/workflows/build.yaml b/python/.github/workflows/build.yaml deleted file mode 100644 index f8a2520b..00000000 --- a/python/.github/workflows/build.yaml +++ /dev/null @@ -1,111 +0,0 @@ -name: Build and push container image -on: - workflow_dispatch: - push: - branches: - - main - - release-* - tags: - - v* - merge_group: - -env: - PUSH: ${{ github.repository_owner == 'jumpstarter-dev' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/') || startsWith(github.ref, 'refs/heads/release-')) }} - REGISTRY: quay.io - QUAY_ORG: quay.io/jumpstarter-dev - -jobs: - build-and-push-image: - strategy: - matrix: - image: - - jumpstarter-dev/jumpstarter Dockerfile - - jumpstarter-dev/jumpstarter-utils Dockerfile.utils - - jumpstarter-dev/jumpstarter-dev .devfile/Containerfile - - jumpstarter-dev/jumpstarter-devspace .devfile/Containerfile.client - runs-on: ubuntu-latest - permissions: - contents: read - packages: write - attestations: write - id-token: write - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - name: Get image name and container file - run: | - IMAGE="${{ matrix.image }}" - IMAGE_NAME=$(echo $IMAGE | awk '{print $1}') - CONTAINERFILE=$(echo $IMAGE | awk '{print $2}') - echo "IMAGE_NAME=${IMAGE_NAME}" >> $GITHUB_ENV - echo "IMAGE_NAME=${IMAGE_NAME}" - echo "CONTAINERFILE=${CONTAINERFILE}" >> $GITHUB_ENV - echo "CONTAINERFILE=${CONTAINERFILE}" - - - name: Get version - if: ${{ env.PUSH == 'true' }} - run: | - VERSION=$(git describe --tags) - VERSION=${VERSION#v} # remove the leading v prefix for version - echo "VERSION=${VERSION}" >> $GITHUB_ENV - echo "VERSION=${VERSION}" - - - name: Set image tags - if: ${{ env.PUSH == 'true' }} - id: set-tags - run: | - TAGS="${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.VERSION }}" - - if [[ "${{ github.ref }}" == 
"refs/heads/main" ]]; then - TAGS="$TAGS,${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest" - fi - - if [[ "${{ github.ref }}" == refs/heads/release-* ]]; then - RELEASE_BRANCH_NAME=$(basename "${{ github.ref }}") - TAGS="$TAGS,${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${RELEASE_BRANCH_NAME}" - fi - - echo "tags=$TAGS" >> $GITHUB_OUTPUT - - - name: Set up QEMU - uses: docker/setup-qemu-action@v3 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - - name: Log in to the Container registry - uses: docker/login-action@v3 - if: ${{ env.PUSH == 'true' }} - with: - registry: ${{ env.REGISTRY }} - username: jumpstarter-dev+jumpstarter_ci - password: ${{ secrets.QUAY_TOKEN }} - - - name: Extract metadata (tags, labels) for Docker - id: meta - uses: docker/metadata-action@v5 - with: - images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} - - - name: Build and push Docker image - id: push - uses: docker/build-push-action@v6 - with: - context: . - file: ${{ env.CONTAINERFILE }} - push: ${{ env.PUSH }} - tags: ${{ steps.set-tags.outputs.tags }} - labels: ${{ steps.meta.outputs.labels }} - platforms: linux/amd64,linux/arm64 - cache-from: type=gha - cache-to: type=gha,mode=max - - - name: Generate artifact attestation - uses: actions/attest-build-provenance@v1 - if: ${{ env.PUSH == 'true' }} - with: - subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} - subject-digest: ${{ steps.push.outputs.digest }} - push-to-registry: ${{ env.PUSH }} diff --git a/python/.github/workflows/e2e.yaml b/python/.github/workflows/e2e.yaml deleted file mode 100644 index 0e79a657..00000000 --- a/python/.github/workflows/e2e.yaml +++ /dev/null @@ -1,24 +0,0 @@ -name: "Run E2E Tests" -on: - workflow_dispatch: - push: - branches: - - main - - release-* - pull_request: - merge_group: - -permissions: - contents: read - -jobs: - e2e: - if: github.repository_owner == 'jumpstarter-dev' - runs-on: ubuntu-latest - timeout-minutes: 60 - continue-on-error: false - steps: - - uses: 
jumpstarter-dev/jumpstarter-e2e@main - with: - controller-ref: main - jumpstarter-ref: ${{ github.ref }} diff --git a/python/.github/workflows/ruff.yaml b/python/.github/workflows/ruff.yaml deleted file mode 100644 index 9028ec4b..00000000 --- a/python/.github/workflows/ruff.yaml +++ /dev/null @@ -1,23 +0,0 @@ -name: Lint - -on: - workflow_dispatch: - push: - branches: - - main - - release-* - pull_request: - merge_group: - -permissions: - contents: read - -jobs: - ruff: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Run ruff - uses: astral-sh/ruff-action@84f83ecf9e1e15d26b7984c7ec9cf73d39ffc946 # v3.3.1 - with: - version-file: pyproject.toml diff --git a/python/.github/workflows/typos.yaml b/python/.github/workflows/typos.yaml deleted file mode 100644 index 33a08716..00000000 --- a/python/.github/workflows/typos.yaml +++ /dev/null @@ -1,21 +0,0 @@ -name: Spell Check - -on: - workflow_dispatch: - push: - branches: - - main - - release-* - pull_request: - merge_group: - -permissions: - contents: read - -jobs: - typos: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Run typos - uses: crate-ci/typos@0f0ccba9ed1df83948f0c15026e4f5ccfce46109 # v1.32.0 From d2e609cfc6070db4166cd7fe0cabc905007c8e02 Mon Sep 17 00:00:00 2001 From: Miguel Angel Ajo Pelayo Date: Wed, 21 Jan 2026 22:21:22 +0100 Subject: [PATCH 06/30] Update import_pr.sh with bug fixes --- import_pr.sh | 104 +++++++++++++++++++++++++++++++++++++-------------- 1 file changed, 76 insertions(+), 28 deletions(-) diff --git a/import_pr.sh b/import_pr.sh index c945f37b..06853832 100755 --- a/import_pr.sh +++ b/import_pr.sh @@ -24,13 +24,27 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" TEMP_DIR="${SCRIPT_DIR}/.import-pr-temp" PATCH_DIR="${TEMP_DIR}/patches" -# Repository mappings: repo_name -> "github_repo subdir" -declare -A REPO_MAP=( - ["python"]="jumpstarter-dev/jumpstarter python" - ["protocol"]="jumpstarter-dev/jumpstarter-protocol protocol" - 
["controller"]="jumpstarter-dev/jumpstarter-controller controller" - ["e2e"]="jumpstarter-dev/jumpstarter-e2e e2e" -) +# Repository mapping function (compatible with bash 3.2+) +get_repo_info() { + local repo_name="$1" + case "$repo_name" in + python) + echo "jumpstarter-dev/jumpstarter python" + ;; + protocol) + echo "jumpstarter-dev/jumpstarter-protocol protocol" + ;; + controller) + echo "jumpstarter-dev/jumpstarter-controller controller" + ;; + e2e) + echo "jumpstarter-dev/jumpstarter-e2e e2e" + ;; + *) + echo "" + ;; + esac +} # Colors for output RED='\033[0;31m' @@ -127,7 +141,9 @@ validate_args() { local pr_number="$2" # Validate repo name - if [ -z "${REPO_MAP[$repo]}" ]; then + local repo_info + repo_info=$(get_repo_info "$repo") + if [ -z "$repo_info" ]; then log_error "Invalid repository name: ${repo}" echo "Valid options are: python, protocol, controller, e2e" exit 1 @@ -182,12 +198,9 @@ clone_and_checkout_pr() { local clone_dir="${TEMP_DIR}/repo" - # Clone the repository + # Clone the repository (full clone needed for patch generation) log_info "Cloning ${github_repo}..." - gh repo clone "${github_repo}" "${clone_dir}" -- --depth=1 --no-single-branch 2>/dev/null || { - # If shallow clone fails, try full clone - gh repo clone "${github_repo}" "${clone_dir}" - } + gh repo clone "${github_repo}" "${clone_dir}" cd "${clone_dir}" @@ -195,9 +208,9 @@ clone_and_checkout_pr() { log_info "Checking out PR #${pr_number}..." gh pr checkout "${pr_number}" --repo "${github_repo}" - # Fetch the base branch to ensure we have it - log_info "Fetching base branch (${PR_BASE_BRANCH})..." - git fetch origin "${PR_BASE_BRANCH}" + # Ensure we have the full history of both branches for finding merge base + log_info "Fetching base branch with full history..." 
+ git fetch --unshallow origin "${PR_BASE_BRANCH}" 2>/dev/null || git fetch origin "${PR_BASE_BRANCH}" CLONE_DIR="${clone_dir}" } @@ -206,29 +219,63 @@ clone_and_checkout_pr() { generate_patches() { log_step "Generating patches..." - cd "${CLONE_DIR}" + cd "${CLONE_DIR}" || { + log_error "Failed to cd to ${CLONE_DIR}" + exit 1 + } # Find the merge base between the PR branch and the base branch local merge_base - merge_base=$(git merge-base "origin/${PR_BASE_BRANCH}" HEAD) + if ! merge_base=$(git merge-base "origin/${PR_BASE_BRANCH}" HEAD 2>&1); then + log_error "Failed to find merge base: ${merge_base}" + exit 1 + fi log_info "Merge base: ${merge_base}" - # Count commits to be patched - local commit_count - commit_count=$(git rev-list --count "${merge_base}..HEAD") - log_info "Commits to import: ${commit_count}" + # Count all commits (including merges) + local total_commits + if ! total_commits=$(git rev-list --count "${merge_base}..HEAD" 2>&1); then + log_error "Failed to count commits: ${total_commits}" + exit 1 + fi + + # Count non-merge commits + local non_merge_commits + if ! non_merge_commits=$(git rev-list --count --no-merges "${merge_base}..HEAD" 2>&1); then + log_error "Failed to count non-merge commits: ${non_merge_commits}" + exit 1 + fi + + log_info "Total commits: ${total_commits} (${non_merge_commits} non-merge)" - if [ "$commit_count" -eq 0 ]; then - log_error "No commits found between merge base and HEAD." + if [ "$non_merge_commits" -eq 0 ]; then + log_error "No non-merge commits found between merge base and HEAD." exit 1 fi + + # Check if there are merge commits + local merge_commits=$((total_commits - non_merge_commits)) + if [ "$merge_commits" -gt 0 ]; then + log_warn "PR contains ${merge_commits} merge commit(s) which will be skipped." + log_warn "Only the ${non_merge_commits} non-merge commits will be imported." 
+ fi - # Generate patches - git format-patch -o "${PATCH_DIR}" "${merge_base}..HEAD" + # Generate patches (skip merge commits) + log_info "Generating patches for non-merge commits..." + if ! git format-patch --no-merges -o "${PATCH_DIR}" "${merge_base}..HEAD"; then + log_error "Failed to generate patches." + exit 1 + fi # Count generated patches - PATCH_COUNT=$(ls -1 "${PATCH_DIR}"/*.patch 2>/dev/null | wc -l | tr -d ' ') + PATCH_COUNT=$(find "${PATCH_DIR}" -name "*.patch" 2>/dev/null | wc -l | tr -d ' ') + + if [ "$PATCH_COUNT" -eq 0 ]; then + log_error "No patches were generated." + exit 1 + fi + log_info "Generated ${PATCH_COUNT} patch file(s)." } @@ -354,7 +401,8 @@ main() { echo "" # Parse repo mapping - local repo_info="${REPO_MAP[$repo_name]}" + local repo_info + repo_info=$(get_repo_info "$repo_name") local github_repo subdir read -r github_repo subdir <<< "${repo_info}" From 7242cd65898c49a51c0725bb6e9329c3f232b309 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Thu, 4 Sep 2025 18:40:47 -0400 Subject: [PATCH 07/30] Add pre/post lease hooks --- .../jumpstarter/config/exporter.py | 26 +- .../jumpstarter/config/exporter_test.py | 52 +++ .../jumpstarter/exporter/exporter.py | 78 ++++- .../jumpstarter/jumpstarter/exporter/hooks.py | 141 ++++++++ .../jumpstarter/exporter/hooks_test.py | 319 ++++++++++++++++++ 5 files changed, 612 insertions(+), 4 deletions(-) create mode 100644 python/packages/jumpstarter/jumpstarter/exporter/hooks.py create mode 100644 python/packages/jumpstarter/jumpstarter/exporter/hooks_test.py diff --git a/python/packages/jumpstarter/jumpstarter/config/exporter.py b/python/packages/jumpstarter/jumpstarter/config/exporter.py index efd4724b..893f0449 100644 --- a/python/packages/jumpstarter/jumpstarter/config/exporter.py +++ b/python/packages/jumpstarter/jumpstarter/config/exporter.py @@ -18,6 +18,16 @@ from jumpstarter.driver import Driver +class HookConfigV1Alpha1(BaseModel): + """Configuration for lifecycle hooks.""" + + model_config = 
ConfigDict(populate_by_name=True) + + pre_lease: str | None = Field(default=None, alias="preLease") + post_lease: str | None = Field(default=None, alias="postLease") + timeout: int = Field(default=300, description="Hook execution timeout in seconds") + + class ExporterConfigV1Alpha1DriverInstanceProxy(BaseModel): ref: str @@ -93,6 +103,7 @@ class ExporterConfigV1Alpha1(BaseModel): description: str | None = None export: dict[str, ExporterConfigV1Alpha1DriverInstance] = Field(default_factory=dict) + hooks: HookConfigV1Alpha1 = Field(default_factory=HookConfigV1Alpha1) path: Path | None = Field(default=None) @@ -127,7 +138,7 @@ def list(cls) -> ExporterConfigListV1Alpha1: @classmethod def dump_yaml(self, config: Self) -> str: - return yaml.safe_dump(config.model_dump(mode="json", exclude={"alias", "path"}), sort_keys=False) + return yaml.safe_dump(config.model_dump(mode="json", by_alias=True, exclude={"alias", "path"}), sort_keys=False) @classmethod def save(cls, config: Self, path: Optional[str] = None) -> Path: @@ -138,7 +149,7 @@ def save(cls, config: Self, path: Optional[str] = None) -> Path: else: config.path = Path(path) with config.path.open(mode="w") as f: - yaml.safe_dump(config.model_dump(mode="json", exclude={"alias", "path"}), f, sort_keys=False) + yaml.safe_dump(config.model_dump(mode="json", by_alias=True, exclude={"alias", "path"}), f, sort_keys=False) return config.path @classmethod @@ -185,6 +196,16 @@ async def channel_factory(): ) return aio_secure_channel(self.endpoint, credentials, self.grpcOptions) + # Create hook executor if hooks are configured + hook_executor = None + if self.hooks.pre_lease or self.hooks.post_lease: + from jumpstarter.exporter.hooks import HookExecutor + + hook_executor = HookExecutor( + config=self.hooks, + device_factory=ExporterConfigV1Alpha1DriverInstance(children=self.export).instantiate, + ) + exporter = None entered = False try: @@ -197,6 +218,7 @@ async def channel_factory(): ).instantiate, tls=self.tls, 
grpc_options=self.grpcOptions, + hook_executor=hook_executor, ) # Initialize the exporter (registration, etc.) await exporter.__aenter__() diff --git a/python/packages/jumpstarter/jumpstarter/config/exporter_test.py b/python/packages/jumpstarter/jumpstarter/config/exporter_test.py index e9fb4863..eebce783 100644 --- a/python/packages/jumpstarter/jumpstarter/config/exporter_test.py +++ b/python/packages/jumpstarter/jumpstarter/config/exporter_test.py @@ -101,3 +101,55 @@ def test_exporter_config(monkeypatch: pytest.MonkeyPatch, tmp_path: Path): ExporterConfigV1Alpha1.save(config) assert config == ExporterConfigV1Alpha1.load("test") + + +def test_exporter_config_with_hooks(monkeypatch: pytest.MonkeyPatch, tmp_path: Path): + monkeypatch.setattr(ExporterConfigV1Alpha1, "BASE_PATH", tmp_path) + + path = tmp_path / "test-hooks.yaml" + + text = """apiVersion: jumpstarter.dev/v1alpha1 +kind: ExporterConfig +metadata: + namespace: default + name: test-hooks +endpoint: "jumpstarter.my-lab.com:1443" +token: "test-token" +hooks: + preLease: | + echo "Pre-lease hook for $LEASE_NAME" + j power on + postLease: | + echo "Post-lease hook for $LEASE_NAME" + j power off + timeout: 600 +export: + power: + type: "jumpstarter_driver_power.driver.PduPower" +""" + path.write_text( + text, + encoding="utf-8", + ) + + config = ExporterConfigV1Alpha1.load("test-hooks") + + assert config.hooks.pre_lease == 'echo "Pre-lease hook for $LEASE_NAME"\nj power on\n' + assert config.hooks.post_lease == 'echo "Post-lease hook for $LEASE_NAME"\nj power off\n' + assert config.hooks.timeout == 600 + + # Test that it round-trips correctly + path.unlink() + ExporterConfigV1Alpha1.save(config) + reloaded_config = ExporterConfigV1Alpha1.load("test-hooks") + + assert reloaded_config.hooks.pre_lease == config.hooks.pre_lease + assert reloaded_config.hooks.post_lease == config.hooks.post_lease + assert reloaded_config.hooks.timeout == config.hooks.timeout + + # Test that the YAML uses camelCase + yaml_output = 
ExporterConfigV1Alpha1.dump_yaml(config) + assert "preLease:" in yaml_output + assert "postLease:" in yaml_output + assert "pre_lease:" not in yaml_output + assert "post_lease:" not in yaml_output diff --git a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py index a33a6a9b..d4aebd68 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py @@ -8,6 +8,7 @@ from anyio import ( AsyncContextManagerMixin, CancelScope, + Event, connect_unix, create_memory_object_stream, create_task_group, @@ -25,6 +26,7 @@ from jumpstarter.common.streams import connect_router_stream from jumpstarter.config.tls import TLSConfigV1Alpha1 from jumpstarter.driver import Driver +from jumpstarter.exporter.hooks import HookContext, HookExecutor from jumpstarter.exporter.session import Session logger = logging.getLogger(__name__) @@ -37,11 +39,14 @@ class Exporter(AsyncContextManagerMixin, Metadata): lease_name: str = field(init=False, default="") tls: TLSConfigV1Alpha1 = field(default_factory=TLSConfigV1Alpha1) grpc_options: dict[str, str] = field(default_factory=dict) + hook_executor: HookExecutor | None = field(default=None) registered: bool = field(init=False, default=False) _unregister: bool = field(init=False, default=False) _stop_requested: bool = field(init=False, default=False) _started: bool = field(init=False, default=False) _tg: TaskGroup | None = field(init=False, default=None) + _current_client_name: str = field(init=False, default="") + _pre_lease_ready: Event | None = field(init=False, default=None) def stop(self, wait_for_lease_exit=False, should_unregister=False): """Signal the exporter to stop. @@ -51,7 +56,6 @@ def stop(self, wait_for_lease_exit=False, should_unregister=False): should_unregister (bool): If True, unregister from controller. Otherwise rely on heartbeat. 
""" - # Stop immediately if not started yet or if immediate stop is requested if (not self._started or not wait_for_lease_exit) and self._tg is not None: logger.info("Stopping exporter immediately, unregister from controller=%s", should_unregister) self._unregister = should_unregister @@ -148,6 +152,12 @@ async def listen(retries=5, backoff=3): tg.start_soon(listen) + # Wait for pre-lease hook to complete before processing connections + if self._pre_lease_ready is not None: + logger.info("Waiting for pre-lease hook to complete before accepting connections") + await self._pre_lease_ready.wait() + logger.info("Pre-lease hook completed, now accepting connections") + async with self.session() as path: async for request in listen_rx: logger.info("Handling new connection request on lease %s", lease_name) @@ -190,19 +200,83 @@ async def status(retries=5, backoff=3): tg.start_soon(status) async for status in status_rx: if self.lease_name != "" and self.lease_name != status.lease_name: + # Post-lease hook for the previous lease + if self.hook_executor and self._current_client_name: + hook_context = HookContext( + lease_name=self.lease_name, + client_name=self._current_client_name, + ) + # Shield the post-lease hook from cancellation and await it + with CancelScope(shield=True): + await self.hook_executor.execute_post_lease_hook(hook_context) + self.lease_name = status.lease_name logger.info("Lease status changed, killing existing connections") + # Reset event for next lease + self._pre_lease_ready = None self.stop() break + + # Check for lease state transitions + previous_leased = hasattr(self, "_previous_leased") and self._previous_leased + current_leased = status.leased + self.lease_name = status.lease_name if not self._started and self.lease_name != "": self._started = True + # Create event for pre-lease synchronization + self._pre_lease_ready = Event() tg.start_soon(self.handle, self.lease_name, tg) - if status.leased: + + if current_leased: logger.info("Currently 
leased by %s under %s", status.client_name, status.lease_name) + self._current_client_name = status.client_name + + # Pre-lease hook when transitioning from unleased to leased + if not previous_leased: + if self.hook_executor: + hook_context = HookContext( + lease_name=status.lease_name, + client_name=status.client_name, + ) + + # Start pre-lease hook asynchronously + async def run_pre_lease_hook(): + try: + await self.hook_executor.execute_pre_lease_hook(hook_context) + logger.info("Pre-lease hook completed successfully") + except Exception as e: + logger.error("Pre-lease hook failed: %s", e) + finally: + # Always set the event to unblock connections + if self._pre_lease_ready: + self._pre_lease_ready.set() + + tg.start_soon(run_pre_lease_hook) + else: + # No hook configured, set event immediately + if self._pre_lease_ready: + self._pre_lease_ready.set() else: logger.info("Currently not leased") + + # Post-lease hook when transitioning from leased to unleased + if previous_leased and self.hook_executor and self._current_client_name: + hook_context = HookContext( + lease_name=self.lease_name, + client_name=self._current_client_name, + ) + # Shield the post-lease hook from cancellation and await it + with CancelScope(shield=True): + await self.hook_executor.execute_post_lease_hook(hook_context) + + self._current_client_name = "" + # Reset event for next lease + self._pre_lease_ready = None + if self._stop_requested: self.stop(should_unregister=True) break + + self._previous_leased = current_leased self._tg = None diff --git a/python/packages/jumpstarter/jumpstarter/exporter/hooks.py b/python/packages/jumpstarter/jumpstarter/exporter/hooks.py new file mode 100644 index 00000000..16318ce3 --- /dev/null +++ b/python/packages/jumpstarter/jumpstarter/exporter/hooks.py @@ -0,0 +1,141 @@ +"""Lifecycle hooks for Jumpstarter exporters.""" + +import asyncio +import logging +import os +from contextlib import asynccontextmanager +from dataclasses import dataclass, field +from 
typing import Callable + +from jumpstarter.config.env import JMP_DRIVERS_ALLOW, JUMPSTARTER_HOST +from jumpstarter.config.exporter import HookConfigV1Alpha1 +from jumpstarter.driver import Driver +from jumpstarter.exporter.session import Session + +logger = logging.getLogger(__name__) + + +@dataclass(kw_only=True) +class HookContext: + """Context information passed to hooks.""" + + lease_name: str + client_name: str = "" + lease_duration: str = "" + exporter_name: str = "" + exporter_namespace: str = "" + + +@dataclass(kw_only=True) +class HookExecutor: + """Executes lifecycle hooks with access to the j CLI.""" + + config: HookConfigV1Alpha1 + device_factory: Callable[[], Driver] + timeout: int = field(init=False) + + def __post_init__(self): + self.timeout = self.config.timeout + + @asynccontextmanager + async def _create_hook_environment(self, context: HookContext): + """Create a local session and Unix socket for j CLI access.""" + with Session( + root_device=self.device_factory(), + # Use hook context for metadata + labels={ + "jumpstarter.dev/hook-context": "true", + "jumpstarter.dev/lease": context.lease_name, + }, + ) as session: + async with session.serve_unix_async() as unix_path: + # Create environment variables for the hook + hook_env = os.environ.copy() + hook_env.update( + { + JUMPSTARTER_HOST: str(unix_path), + JMP_DRIVERS_ALLOW: "UNSAFE", # Allow all drivers for local access + "LEASE_NAME": context.lease_name, + "CLIENT_NAME": context.client_name, + "LEASE_DURATION": context.lease_duration, + "EXPORTER_NAME": context.exporter_name, + "EXPORTER_NAMESPACE": context.exporter_namespace, + } + ) + + yield hook_env + + async def _execute_hook(self, command: str, context: HookContext) -> bool: + """Execute a single hook command.""" + if not command or not command.strip(): + logger.debug("Hook command is empty, skipping") + return True + + logger.info("Executing hook: %s", command.strip().split("\n")[0][:100]) + + async with 
self._create_hook_environment(context) as hook_env: + try: + # Execute the hook command using shell + process = await asyncio.create_subprocess_shell( + command, + env=hook_env, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.STDOUT, + ) + + try: + # Stream output line-by-line for real-time logging + output_lines = [] + + async def read_output(): + while True: + line = await process.stdout.readline() + if not line: + break + line_decoded = line.decode().rstrip() + output_lines.append(line_decoded) + logger.info("[Hook Output] %s", line_decoded) + + # Run output reading and process waiting concurrently with timeout + await asyncio.wait_for(asyncio.gather(read_output(), process.wait()), timeout=self.timeout) + + if process.returncode == 0: + logger.info("Hook executed successfully") + return True + else: + logger.error("Hook failed with return code %d", process.returncode) + if output_lines: + logger.error("Hook output: %s", "\n".join(output_lines)) + return False + + except asyncio.TimeoutError: + logger.error("Hook timed out after %d seconds", self.timeout) + try: + process.terminate() + await asyncio.wait_for(process.wait(), timeout=5) + except asyncio.TimeoutError: + process.kill() + await process.wait() + return False + + except Exception as e: + logger.error("Error executing hook: %s", e, exc_info=True) + return False + + async def execute_pre_lease_hook(self, context: HookContext) -> bool: + """Execute the pre-lease hook.""" + if not self.config.pre_lease: + logger.debug("No pre-lease hook configured") + return True + + logger.info("Executing pre-lease hook for lease %s", context.lease_name) + return await self._execute_hook(self.config.pre_lease, context) + + async def execute_post_lease_hook(self, context: HookContext) -> bool: + """Execute the post-lease hook.""" + if not self.config.post_lease: + logger.debug("No post-lease hook configured") + return True + + logger.info("Executing post-lease hook for lease %s", context.lease_name) + return 
await self._execute_hook(self.config.post_lease, context) diff --git a/python/packages/jumpstarter/jumpstarter/exporter/hooks_test.py b/python/packages/jumpstarter/jumpstarter/exporter/hooks_test.py new file mode 100644 index 00000000..576ca681 --- /dev/null +++ b/python/packages/jumpstarter/jumpstarter/exporter/hooks_test.py @@ -0,0 +1,319 @@ +import asyncio +from unittest.mock import AsyncMock, Mock, call, patch + +import pytest + +from jumpstarter.config.env import JMP_DRIVERS_ALLOW, JUMPSTARTER_HOST +from jumpstarter.config.exporter import HookConfigV1Alpha1 +from jumpstarter.driver import Driver +from jumpstarter.exporter.hooks import HookContext, HookExecutor + +pytestmark = pytest.mark.anyio + + +class MockDriver(Driver): + @classmethod + def client(cls) -> str: + return "test.MockClient" + + def close(self): + pass + + def reset(self): + pass + + +@pytest.fixture +def mock_device_factory(): + def factory(): + return MockDriver() + + return factory + + +@pytest.fixture +def hook_config(): + return HookConfigV1Alpha1( + pre_lease="echo 'Pre-lease hook executed'", + post_lease="echo 'Post-lease hook executed'", + timeout=10, + ) + + +@pytest.fixture +def hook_context(): + return HookContext( + lease_name="test-lease-123", + client_name="test-client", + lease_duration="30m", + exporter_name="test-exporter", + exporter_namespace="default", + ) + + +class TestHookExecutor: + async def test_hook_executor_creation(self, hook_config, mock_device_factory): + executor = HookExecutor( + config=hook_config, + device_factory=mock_device_factory, + ) + + assert executor.config == hook_config + assert executor.device_factory == mock_device_factory + assert executor.timeout == 10 + + async def test_empty_hook_execution(self, mock_device_factory, hook_context): + empty_config = HookConfigV1Alpha1() + executor = HookExecutor( + config=empty_config, + device_factory=mock_device_factory, + ) + + # Both hooks should return True for empty/None commands + assert await 
executor.execute_pre_lease_hook(hook_context) is True + assert await executor.execute_post_lease_hook(hook_context) is True + + async def test_successful_hook_execution(self, mock_device_factory, hook_context): + hook_config = HookConfigV1Alpha1( + pre_lease="echo 'Pre-lease hook executed'", + timeout=10, + ) + # Mock the Session and serve_unix_async + with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + + # Mock the async context manager for serve_unix_async + mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") + mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) + + # Mock asyncio.create_subprocess_shell to simulate successful execution + mock_process = AsyncMock() + mock_process.returncode = 0 + # Mock stdout.readline to simulate line-by-line output + mock_process.stdout.readline.side_effect = [ + b"Pre-lease hook executed\n", + b"", # EOF + ] + mock_process.wait = AsyncMock(return_value=None) + + with patch("asyncio.create_subprocess_shell", return_value=mock_process) as mock_subprocess: + executor = HookExecutor( + config=hook_config, + device_factory=mock_device_factory, + ) + + result = await executor.execute_pre_lease_hook(hook_context) + + assert result is True + + # Verify subprocess was called with correct environment + mock_subprocess.assert_called_once() + call_args = mock_subprocess.call_args + command = call_args[0][0] + env = call_args[1]["env"] + + assert command == "echo 'Pre-lease hook executed'" + assert JUMPSTARTER_HOST in env + assert env[JUMPSTARTER_HOST] == "/tmp/test_socket" + assert env[JMP_DRIVERS_ALLOW] == "UNSAFE" + assert env["LEASE_NAME"] == "test-lease-123" + assert env["CLIENT_NAME"] == "test-client" + + async def test_failed_hook_execution(self, mock_device_factory, hook_context): + failed_config = HookConfigV1Alpha1( + pre_lease="exit 
1", # Command that will fail + timeout=10, + ) + + with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + + mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") + mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) + + # Mock failed process + mock_process = AsyncMock() + mock_process.returncode = 1 + # Mock stdout.readline for failed process + mock_process.stdout.readline.side_effect = [ + b"Command failed\n", + b"", # EOF + ] + mock_process.wait = AsyncMock(return_value=None) + + with patch("asyncio.create_subprocess_shell", return_value=mock_process): + executor = HookExecutor( + config=failed_config, + device_factory=mock_device_factory, + ) + + result = await executor.execute_pre_lease_hook(hook_context) + + assert result is False + + async def test_hook_timeout(self, mock_device_factory, hook_context): + timeout_config = HookConfigV1Alpha1( + pre_lease="sleep 60", # Command that will timeout + timeout=1, # 1 second timeout + ) + + with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + + mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") + mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) + + # Mock process that times out + mock_process = AsyncMock() + mock_process.terminate.return_value = None + mock_process.wait.return_value = None + + with ( + patch("asyncio.create_subprocess_shell", return_value=mock_process), + patch("asyncio.wait_for", side_effect=asyncio.TimeoutError()), + ): + executor = HookExecutor( + config=timeout_config, + device_factory=mock_device_factory, + ) + + result = await executor.execute_pre_lease_hook(hook_context) + + assert result is False + 
mock_process.terminate.assert_called_once() + + async def test_hook_environment_variables(self, mock_device_factory, hook_context): + hook_config = HookConfigV1Alpha1( + pre_lease="echo 'Pre-lease hook executed'", + timeout=10, + ) + with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + + mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") + mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) + + mock_process = AsyncMock() + mock_process.returncode = 0 + # Mock stdout.readline for environment test + mock_process.stdout.readline.side_effect = [ + b"", # EOF (no output) + ] + mock_process.wait = AsyncMock(return_value=None) + + with patch("asyncio.create_subprocess_shell", return_value=mock_process) as mock_subprocess: + executor = HookExecutor( + config=hook_config, + device_factory=mock_device_factory, + ) + + await executor.execute_pre_lease_hook(hook_context) + + # Check that all expected environment variables are set + call_args = mock_subprocess.call_args + env = call_args[1]["env"] + + assert env["LEASE_NAME"] == "test-lease-123" + assert env["CLIENT_NAME"] == "test-client" + assert env["LEASE_DURATION"] == "30m" + assert env["EXPORTER_NAME"] == "test-exporter" + assert env["EXPORTER_NAMESPACE"] == "default" + assert env[JUMPSTARTER_HOST] == "/tmp/test_socket" + assert env[JMP_DRIVERS_ALLOW] == "UNSAFE" + + async def test_real_time_output_logging(self, mock_device_factory, hook_context): + """Test that hook output is logged in real-time at INFO level.""" + hook_config = HookConfigV1Alpha1( + pre_lease="echo 'Line 1'; echo 'Line 2'; echo 'Line 3'", + timeout=10, + ) + + with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + + 
mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") + mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) + + mock_process = AsyncMock() + mock_process.returncode = 0 + # Mock multiple lines of output to verify streaming + mock_process.stdout.readline.side_effect = [ + b"Line 1\n", + b"Line 2\n", + b"Line 3\n", + b"", # EOF + ] + mock_process.wait = AsyncMock(return_value=None) + + # Mock the logger to capture log calls + with ( + patch("jumpstarter.exporter.hooks.logger") as mock_logger, + patch("asyncio.create_subprocess_shell", return_value=mock_process), + ): + executor = HookExecutor( + config=hook_config, + device_factory=mock_device_factory, + ) + + result = await executor.execute_pre_lease_hook(hook_context) + + assert result is True + + # Verify that output lines were logged in real-time at INFO level + expected_calls = [ + call.info("Executing hook: %s", "echo 'Line 1'; echo 'Line 2'; echo 'Line 3'"), + call.info("[Hook Output] %s", "Line 1"), + call.info("[Hook Output] %s", "Line 2"), + call.info("[Hook Output] %s", "Line 3"), + call.info("Hook executed successfully"), + ] + mock_logger.info.assert_has_calls(expected_calls, any_order=False) + + async def test_post_lease_hook_execution_on_completion(self, mock_device_factory, hook_context): + """Test that post-lease hook executes when called directly.""" + hook_config = HookConfigV1Alpha1( + post_lease="echo 'Post-lease cleanup completed'", + timeout=10, + ) + + with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + + mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") + mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) + + mock_process = AsyncMock() + mock_process.returncode = 0 + # Mock post-lease hook output + 
mock_process.stdout.readline.side_effect = [ + b"Post-lease cleanup completed\n", + b"", # EOF + ] + mock_process.wait = AsyncMock(return_value=None) + + # Mock the logger to capture log calls + with patch("jumpstarter.exporter.hooks.logger") as mock_logger, \ + patch("asyncio.create_subprocess_shell", return_value=mock_process): + executor = HookExecutor( + config=hook_config, + device_factory=mock_device_factory, + ) + + result = await executor.execute_post_lease_hook(hook_context) + + assert result is True + + # Verify that post-lease hook output was logged + expected_calls = [ + call.info("Executing post-lease hook for lease %s", "test-lease-123"), + call.info("Executing hook: %s", "echo 'Post-lease cleanup completed'"), + call.info("[Hook Output] %s", "Post-lease cleanup completed"), + call.info("Hook executed successfully"), + ] + mock_logger.info.assert_has_calls(expected_calls, any_order=False) From 96e8421b0893af1306353174c875050fa507ed53 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Mon, 29 Sep 2025 11:00:32 -0400 Subject: [PATCH 08/30] Add enums and exporter status reporting --- .../jumpstarter-cli/jumpstarter_cli/get.py | 9 +- .../jumpstarter/jumpstarter/client/core.py | 30 ++++++- .../jumpstarter/jumpstarter/client/grpc.py | 26 +++++- .../jumpstarter/common/__init__.py | 10 ++- .../jumpstarter/jumpstarter/common/enums.py | 76 +++++++++++++++++ .../jumpstarter/jumpstarter/config/client.py | 4 +- .../jumpstarter/exporter/exporter.py | 82 ++++++++++++++----- .../jumpstarter/exporter/logging.py | 6 +- .../jumpstarter/exporter/session.py | 25 +++++- 9 files changed, 237 insertions(+), 31 deletions(-) create mode 100644 python/packages/jumpstarter/jumpstarter/common/enums.py diff --git a/python/packages/jumpstarter-cli/jumpstarter_cli/get.py b/python/packages/jumpstarter-cli/jumpstarter_cli/get.py index f7d1a041..869dfd06 100644 --- a/python/packages/jumpstarter-cli/jumpstarter_cli/get.py +++ b/python/packages/jumpstarter-cli/jumpstarter_cli/get.py @@ 
-21,8 +21,8 @@ def get(): @opt_output_all @opt_comma_separated( "with", - {"leases", "online"}, - help_text="Include fields: leases, online (comma-separated or repeated)" + {"leases", "online", "status"}, + help_text="Include fields: leases, online, status (comma-separated or repeated)", ) @handle_exceptions_with_reauthentication(relogin_client) def get_exporters(config, selector: str | None, output: OutputType, with_options: list[str]): @@ -32,7 +32,10 @@ def get_exporters(config, selector: str | None, output: OutputType, with_options include_leases = "leases" in with_options include_online = "online" in with_options - exporters = config.list_exporters(filter=selector, include_leases=include_leases, include_online=include_online) + include_status = "status" in with_options + exporters = config.list_exporters( + filter=selector, include_leases=include_leases, include_online=include_online, include_status=include_status + ) model_print(exporters, output) diff --git a/python/packages/jumpstarter/jumpstarter/client/core.py b/python/packages/jumpstarter/jumpstarter/client/core.py index 3befe92e..2f6491db 100644 --- a/python/packages/jumpstarter/jumpstarter/client/core.py +++ b/python/packages/jumpstarter/jumpstarter/client/core.py @@ -14,7 +14,7 @@ from jumpstarter_protocol import jumpstarter_pb2, jumpstarter_pb2_grpc, router_pb2_grpc from rich.logging import RichHandler -from jumpstarter.common import Metadata +from jumpstarter.common import ExporterStatus, Metadata from jumpstarter.common.exceptions import JumpstarterException from jumpstarter.common.resources import ResourceMetadata from jumpstarter.common.serde import decode_value, encode_value @@ -48,6 +48,12 @@ class DriverInvalidArgument(DriverError, ValueError): """ +class ExporterNotReady(DriverError): + """ + Raised when the exporter is not ready to accept driver calls + """ + + @dataclass(kw_only=True) class AsyncDriverClient( Metadata, @@ -76,9 +82,28 @@ def __post_init__(self): handler = RichHandler() 
self.logger.addHandler(handler) + async def check_exporter_status(self): + """Check if the exporter is ready to accept driver calls""" + try: + response = await self.stub.GetStatus(jumpstarter_pb2.GetStatusRequest()) + status = ExporterStatus.from_proto(response.status) + + if status != ExporterStatus.LEASE_READY: + raise ExporterNotReady(f"Exporter status is {status}: {response.status_message}") + + except AioRpcError as e: + # If GetStatus is not implemented, assume ready for backward compatibility + if e.code() == StatusCode.UNIMPLEMENTED: + self.logger.debug("GetStatus not implemented, assuming exporter is ready") + return + raise DriverError(f"Failed to check exporter status: {e.details()}") from e + async def call_async(self, method, *args): """Make DriverCall by method name and arguments""" + # Check exporter status before making the call + await self.check_exporter_status() + request = jumpstarter_pb2.DriverCallRequest( uuid=str(self.uuid), method=method, @@ -105,6 +130,9 @@ async def call_async(self, method, *args): async def streamingcall_async(self, method, *args): """Make StreamingDriverCall by method name and arguments""" + # Check exporter status before making the call + await self.check_exporter_status() + request = jumpstarter_pb2.StreamingDriverCallRequest( uuid=str(self.uuid), method=method, diff --git a/python/packages/jumpstarter/jumpstarter/client/grpc.py b/python/packages/jumpstarter/jumpstarter/client/grpc.py index 445f255c..0bfd06e9 100644 --- a/python/packages/jumpstarter/jumpstarter/client/grpc.py +++ b/python/packages/jumpstarter/jumpstarter/client/grpc.py @@ -13,6 +13,7 @@ from jumpstarter_protocol import client_pb2, client_pb2_grpc, jumpstarter_pb2_grpc, kubernetes_pb2, router_pb2_grpc from pydantic import BaseModel, ConfigDict, Field, field_serializer +from jumpstarter.common import ExporterStatus from jumpstarter.common.grpc import translate_grpc_exceptions @@ -20,6 +21,7 @@ class WithOptions: show_online: bool = False show_leases: 
bool = False + show_status: bool = False def add_display_columns(table, options: WithOptions = None): @@ -28,6 +30,8 @@ def add_display_columns(table, options: WithOptions = None): table.add_column("NAME") if options.show_online: table.add_column("ONLINE") + if options.show_status: + table.add_column("STATUS") table.add_column("LABELS") if options.show_leases: table.add_column("LEASED BY") @@ -42,6 +46,9 @@ def add_exporter_row(table, exporter, options: WithOptions = None, lease_info: t row_data.append(exporter.name) if options.show_online: row_data.append("yes" if exporter.online else "no") + if options.show_status: + status_str = str(exporter.status) if exporter.status else "UNKNOWN" + row_data.append(status_str) row_data.append(",".join(("{}={}".format(k, v) for k, v in sorted(exporter.labels.items())))) if options.show_leases: if lease_info: @@ -81,12 +88,16 @@ class Exporter(BaseModel): name: str labels: dict[str, str] online: bool = False + status: ExporterStatus | None = None lease: Lease | None = None @classmethod def from_protobuf(cls, data: client_pb2.Exporter) -> Exporter: namespace, name = parse_exporter_identifier(data.name) - return cls(namespace=namespace, name=name, labels=data.labels, online=data.online) + status = None + if hasattr(data, "status") and data.status: + status = ExporterStatus.from_proto(data.status) + return cls(namespace=namespace, name=name, labels=data.labels, online=data.online, status=status) @classmethod def rich_add_columns(cls, table, options: WithOptions = None): @@ -244,6 +255,7 @@ class ExporterList(BaseModel): next_page_token: str | None = Field(exclude=True) include_online: bool = Field(default=False, exclude=True) include_leases: bool = Field(default=False, exclude=True) + include_status: bool = Field(default=False, exclude=True) @classmethod def from_protobuf(cls, data: client_pb2.ListExportersResponse) -> ExporterList: @@ -253,11 +265,15 @@ def from_protobuf(cls, data: client_pb2.ListExportersResponse) -> 
ExporterList: ) def rich_add_columns(self, table): - options = WithOptions(show_online=self.include_online, show_leases=self.include_leases) + options = WithOptions( + show_online=self.include_online, show_leases=self.include_leases, show_status=self.include_status + ) Exporter.rich_add_columns(table, options) def rich_add_rows(self, table): - options = WithOptions(show_online=self.include_online, show_leases=self.include_leases) + options = WithOptions( + show_online=self.include_online, show_leases=self.include_leases, show_status=self.include_status + ) for exporter in self.exporters: exporter.rich_add_rows(table, options) @@ -274,6 +290,8 @@ def model_dump_json(self, **kwargs): exclude_fields.add("lease") if not self.include_online: exclude_fields.add("online") + if not self.include_status: + exclude_fields.add("status") data = {"exporters": [exporter.model_dump(mode="json", exclude=exclude_fields) for exporter in self.exporters]} return json.dumps(data, **json_kwargs) @@ -284,6 +302,8 @@ def model_dump(self, **kwargs): exclude_fields.add("lease") if not self.include_online: exclude_fields.add("online") + if not self.include_status: + exclude_fields.add("status") return {"exporters": [exporter.model_dump(mode="json", exclude=exclude_fields) for exporter in self.exporters]} diff --git a/python/packages/jumpstarter/jumpstarter/common/__init__.py b/python/packages/jumpstarter/jumpstarter/common/__init__.py index 13058cb0..08645b47 100644 --- a/python/packages/jumpstarter/jumpstarter/common/__init__.py +++ b/python/packages/jumpstarter/jumpstarter/common/__init__.py @@ -1,4 +1,12 @@ +from .enums import ExporterStatus, LogSource from .metadata import Metadata from .tempfile import TemporarySocket, TemporaryTcpListener, TemporaryUnixListener -__all__ = ["Metadata", "TemporarySocket", "TemporaryUnixListener", "TemporaryTcpListener"] +__all__ = [ + "ExporterStatus", + "LogSource", + "Metadata", + "TemporarySocket", + "TemporaryUnixListener", + "TemporaryTcpListener", 
+] diff --git a/python/packages/jumpstarter/jumpstarter/common/enums.py b/python/packages/jumpstarter/jumpstarter/common/enums.py new file mode 100644 index 00000000..ce6a79c2 --- /dev/null +++ b/python/packages/jumpstarter/jumpstarter/common/enums.py @@ -0,0 +1,76 @@ +"""Human-readable enum wrappers for protobuf-generated constants.""" + +from enum import IntEnum + +from jumpstarter_protocol.jumpstarter.v1 import common_pb2 + + +class ExporterStatus(IntEnum): + """Exporter status states.""" + + UNSPECIFIED = common_pb2.EXPORTER_STATUS_UNSPECIFIED + """Unknown/unspecified exporter status""" + + OFFLINE = common_pb2.EXPORTER_STATUS_OFFLINE + """The exporter is currently offline""" + + AVAILABLE = common_pb2.EXPORTER_STATUS_AVAILABLE + """Exporter is available to be leased""" + + BEFORE_LEASE_HOOK = common_pb2.EXPORTER_STATUS_BEFORE_LEASE_HOOK + """Exporter is leased, but currently executing before lease hook""" + + LEASE_READY = common_pb2.EXPORTER_STATUS_LEASE_READY + """Exporter is leased and ready to accept commands""" + + AFTER_LEASE_HOOK = common_pb2.EXPORTER_STATUS_AFTER_LEASE_HOOK + """Lease was released, but exporter is executing after lease hook""" + + BEFORE_LEASE_HOOK_FAILED = common_pb2.EXPORTER_STATUS_BEFORE_LEASE_HOOK_FAILED + """The before lease hook failed and the exporter is no longer available""" + + AFTER_LEASE_HOOK_FAILED = common_pb2.EXPORTER_STATUS_AFTER_LEASE_HOOK_FAILED + """The after lease hook failed and the exporter is no longer available""" + + def __str__(self): + return self.name + + @classmethod + def from_proto(cls, value: int) -> "ExporterStatus": + """Convert from protobuf integer to enum.""" + return cls(value) + + def to_proto(self) -> int: + """Convert to protobuf integer.""" + return self.value + + +class LogSource(IntEnum): + """Log source types.""" + + UNSPECIFIED = common_pb2.LOG_SOURCE_UNSPECIFIED + """Unspecified/unknown log source""" + + DRIVER = common_pb2.LOG_SOURCE_DRIVER + """Logs produced by a Jumpstarter driver""" + 
+ BEFORE_LEASE_HOOK = common_pb2.LOG_SOURCE_BEFORE_LEASE_HOOK + """Logs produced by a before lease hook""" + + AFTER_LEASE_HOOK = common_pb2.LOG_SOURCE_AFTER_LEASE_HOOK + """Logs produced by an after lease hook""" + + SYSTEM = common_pb2.LOG_SOURCE_SYSTEM + """System/exporter logs""" + + def __str__(self): + return self.name + + @classmethod + def from_proto(cls, value: int) -> "LogSource": + """Convert from protobuf integer to enum.""" + return cls(value) + + def to_proto(self) -> int: + """Convert to protobuf integer.""" + return self.value diff --git a/python/packages/jumpstarter/jumpstarter/config/client.py b/python/packages/jumpstarter/jumpstarter/config/client.py index 97f92c1e..c6fc2d91 100644 --- a/python/packages/jumpstarter/jumpstarter/config/client.py +++ b/python/packages/jumpstarter/jumpstarter/config/client.py @@ -160,12 +160,14 @@ async def list_exporters( filter: str | None = None, include_leases: bool = False, include_online: bool = False, + include_status: bool = False, ): svc = ClientService(channel=await self.channel(), namespace=self.metadata.namespace) exporters_response = await svc.ListExporters(page_size=page_size, page_token=page_token, filter=filter) - # Set the include_online flag for display purposes + # Set the include flags for display purposes exporters_response.include_online = include_online + exporters_response.include_status = include_status if not include_leases: return exporters_response diff --git a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py index d4aebd68..1a48bece 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py @@ -22,7 +22,7 @@ jumpstarter_pb2_grpc, ) -from jumpstarter.common import Metadata +from jumpstarter.common import ExporterStatus, Metadata from jumpstarter.common.streams import connect_router_stream from jumpstarter.config.tls import 
TLSConfigV1Alpha1 from jumpstarter.driver import Driver @@ -47,6 +47,8 @@ class Exporter(AsyncContextManagerMixin, Metadata): _tg: TaskGroup | None = field(init=False, default=None) _current_client_name: str = field(init=False, default="") _pre_lease_ready: Event | None = field(init=False, default=None) + _current_status: ExporterStatus = field(init=False, default=ExporterStatus.OFFLINE) + _current_session: Session | None = field(init=False, default=None) def stop(self, wait_for_lease_exit=False, should_unregister=False): """Signal the exporter to stop. @@ -56,6 +58,7 @@ def stop(self, wait_for_lease_exit=False, should_unregister=False): should_unregister (bool): If True, unregister from controller. Otherwise rely on heartbeat. """ + # Stop immediately if not started yet or if immediate stop is requested if (not self._started or not wait_for_lease_exit) and self._tg is not None: logger.info("Stopping exporter immediately, unregister from controller=%s", should_unregister) self._unregister = should_unregister @@ -64,6 +67,26 @@ def stop(self, wait_for_lease_exit=False, should_unregister=False): self._stop_requested = True logger.info("Exporter marked for stop upon lease exit") + async def _update_status(self, status: ExporterStatus, message: str = ""): + """Update exporter status with the controller and session.""" + self._current_status = status + + # Update session status if available + if self._current_session: + self._current_session.update_status(status, message) + + try: + controller = jumpstarter_pb2_grpc.ControllerServiceStub(await self.channel_factory()) + await controller.UpdateStatus( + jumpstarter_pb2.UpdateStatusRequest( + status=status.to_proto(), + status_message=message, + ) + ) + logger.info(f"Updated status to {status}: {message}") + except Exception as e: + logger.error(f"Failed to update status: {e}") + @asynccontextmanager async def __asynccontextmanager__(self) -> AsyncGenerator[Self]: try: @@ -77,6 +100,7 @@ async def 
__asynccontextmanager__(self) -> AsyncGenerator[Self]: channel = await self.channel_factory() try: controller = jumpstarter_pb2_grpc.ControllerServiceStub(channel) + await self._update_status(ExporterStatus.OFFLINE, "Exporter shutting down") await controller.Unregister( jumpstarter_pb2.UnregisterRequest( reason="Exporter shutdown", @@ -109,20 +133,27 @@ async def session(self): labels=self.labels, root_device=self.device_factory(), ) as session: - async with session.serve_unix_async() as path: - async with grpc.aio.secure_channel( - f"unix://{path}", grpc.local_channel_credentials(grpc.LocalConnectionType.UDS) - ) as channel: - response = await jumpstarter_pb2_grpc.ExporterServiceStub(channel).GetReport(empty_pb2.Empty()) - logger.info("Registering exporter with controller") - await controller.Register( - jumpstarter_pb2.RegisterRequest( - labels=self.labels, - reports=response.reports, + # Store session reference for status updates + self._current_session = session + try: + async with session.serve_unix_async() as path: + async with grpc.aio.secure_channel( + f"unix://{path}", grpc.local_channel_credentials(grpc.LocalConnectionType.UDS) + ) as channel: + response = await jumpstarter_pb2_grpc.ExporterServiceStub(channel).GetReport(empty_pb2.Empty()) + logger.info("Registering exporter with controller") + await controller.Register( + jumpstarter_pb2.RegisterRequest( + labels=self.labels, + reports=response.reports, + ) ) - ) - self.registered = True - yield path + self.registered = True + await self._update_status(ExporterStatus.AVAILABLE, "Exporter registered and available") + yield path + finally: + # Clear session reference + self._current_session = None async def handle(self, lease_name, tg): logger.info("Listening for incoming connection requests on lease %s", lease_name) @@ -208,7 +239,9 @@ async def status(retries=5, backoff=3): ) # Shield the post-lease hook from cancellation and await it with CancelScope(shield=True): + await 
self._update_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") await self.hook_executor.execute_post_lease_hook(hook_context) + await self._update_status(ExporterStatus.AVAILABLE, "Available for new lease") self.lease_name = status.lease_name logger.info("Lease status changed, killing existing connections") @@ -241,20 +274,29 @@ async def status(retries=5, backoff=3): ) # Start pre-lease hook asynchronously - async def run_pre_lease_hook(): + async def run_before_lease_hook(hook_ctx): try: - await self.hook_executor.execute_pre_lease_hook(hook_context) - logger.info("Pre-lease hook completed successfully") + await self._update_status( + ExporterStatus.BEFORE_LEASE_HOOK, "Running beforeLease hooks" + ) + await self.hook_executor.execute_pre_lease_hook(hook_ctx) + await self._update_status(ExporterStatus.LEASE_READY, "Ready for commands") + logger.info("beforeLease hook completed successfully") except Exception as e: - logger.error("Pre-lease hook failed: %s", e) + logger.error("beforeLease hook failed: %s", e) + # Still transition to ready even if hook fails + await self._update_status( + ExporterStatus.LEASE_READY, f"Ready (beforeLease hook failed: {e})" + ) finally: # Always set the event to unblock connections if self._pre_lease_ready: self._pre_lease_ready.set() - tg.start_soon(run_pre_lease_hook) + tg.start_soon(run_before_lease_hook, hook_context) else: # No hook configured, set event immediately + await self._update_status(ExporterStatus.LEASE_READY, "Ready for commands") if self._pre_lease_ready: self._pre_lease_ready.set() else: @@ -268,7 +310,9 @@ async def run_pre_lease_hook(): ) # Shield the post-lease hook from cancellation and await it with CancelScope(shield=True): + await self._update_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") await self.hook_executor.execute_post_lease_hook(hook_context) + await self._update_status(ExporterStatus.AVAILABLE, "Available for new lease") self._current_client_name = "" # Reset 
event for next lease diff --git a/python/packages/jumpstarter/jumpstarter/exporter/logging.py b/python/packages/jumpstarter/jumpstarter/exporter/logging.py index 629306c2..8b73467d 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/logging.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/logging.py @@ -3,12 +3,15 @@ from jumpstarter_protocol import jumpstarter_pb2 +from jumpstarter.common import LogSource + class LogHandler(logging.Handler): - def __init__(self, queue: deque): + def __init__(self, queue: deque, source: LogSource = LogSource.UNSPECIFIED): logging.Handler.__init__(self) self.queue = queue self.listener = None + self.source = source # LogSource enum value def enqueue(self, record): self.queue.append(record) @@ -18,6 +21,7 @@ def prepare(self, record): uuid="", severity=record.levelname, message=self.format(record), + source=self.source.value, # Convert to proto value ) def emit(self, record): diff --git a/python/packages/jumpstarter/jumpstarter/exporter/session.py b/python/packages/jumpstarter/jumpstarter/exporter/session.py index 63ae2f08..f9f2340d 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/session.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/session.py @@ -17,7 +17,8 @@ ) from .logging import LogHandler -from jumpstarter.common import Metadata, TemporarySocket +from jumpstarter.common import ExporterStatus, Metadata, TemporarySocket +from jumpstarter.common.enums import LogSource from jumpstarter.common.streams import StreamRequestMetadata from jumpstarter.driver import Driver from jumpstarter.streams.common import forward_stream @@ -39,6 +40,9 @@ class Session( _logging_queue: deque = field(init=False) _logging_handler: QueueHandler = field(init=False) + _current_status: ExporterStatus = field(init=False, default=ExporterStatus.AVAILABLE) + _status_message: str = field(init=False, default="") + _status_update_event: Event = field(init=False) @contextmanager def __contextmanager__(self) -> 
Generator[Self]: @@ -67,7 +71,8 @@ def __init__(self, *args, root_device, **kwargs): self.mapping = {u: i for (u, _, _, i) in self.root_device.enumerate()} self._logging_queue = deque(maxlen=32) - self._logging_handler = LogHandler(self._logging_queue) + self._logging_handler = LogHandler(self._logging_queue, LogSource.SYSTEM) + self._status_update_event = Event() @asynccontextmanager async def serve_port_async(self, port): @@ -139,3 +144,19 @@ async def LogStream(self, request, context): yield self._logging_queue.popleft() except IndexError: await sleep(0.5) + + def update_status(self, status: int | ExporterStatus, message: str = ""): + """Update the current exporter status for the session.""" + if isinstance(status, int): + self._current_status = ExporterStatus.from_proto(status) + else: + self._current_status = status + self._status_message = message + + async def GetStatus(self, request, context): + """Get the current exporter status.""" + logger.debug("GetStatus() -> %s", self._current_status) + return jumpstarter_pb2.GetStatusResponse( + status=self._current_status.to_proto(), + status_message=self._status_message, + ) From 796bb90e7f3afcb2a4bdd7951f44df109b2397ca Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Mon, 29 Sep 2025 16:34:50 -0400 Subject: [PATCH 09/30] Improve logging infrastructure --- .../jumpstarter/jumpstarter/driver/base.py | 5 +- .../jumpstarter/exporter/exporter.py | 6 ++ .../jumpstarter/jumpstarter/exporter/hooks.py | 24 +++++--- .../jumpstarter/exporter/logging.py | 61 ++++++++++++++++++- .../jumpstarter/exporter/session.py | 18 +++++- 5 files changed, 101 insertions(+), 13 deletions(-) diff --git a/python/packages/jumpstarter/jumpstarter/driver/base.py b/python/packages/jumpstarter/jumpstarter/driver/base.py index 8c67264c..ee3cdd18 100644 --- a/python/packages/jumpstarter/jumpstarter/driver/base.py +++ b/python/packages/jumpstarter/jumpstarter/driver/base.py @@ -27,8 +27,9 @@ MARKER_STREAMCALL, MARKER_STREAMING_DRIVERCALL, ) -from 
jumpstarter.common import Metadata +from jumpstarter.common import LogSource, Metadata from jumpstarter.common.resources import ClientStreamResource, PresignedRequestResource, Resource, ResourceMetadata +from jumpstarter.exporter.logging import get_logger from jumpstarter.common.serde import decode_value, encode_value from jumpstarter.common.streams import ( DriverStreamRequest, @@ -86,7 +87,7 @@ def __post_init__(self): if hasattr(super(), "__post_init__"): super().__post_init__() - self.logger = logging.getLogger(self.__class__.__name__) + self.logger = get_logger(f"driver.{self.__class__.__name__}", LogSource.DRIVER) self.logger.setLevel(self.log_level) def close(self): diff --git a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py index 1a48bece..c1f370d8 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py @@ -240,6 +240,8 @@ async def status(retries=5, backoff=3): # Shield the post-lease hook from cancellation and await it with CancelScope(shield=True): await self._update_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") + # Pass the current session to hook executor for logging + self.hook_executor.main_session = self._current_session await self.hook_executor.execute_post_lease_hook(hook_context) await self._update_status(ExporterStatus.AVAILABLE, "Available for new lease") @@ -279,6 +281,8 @@ async def run_before_lease_hook(hook_ctx): await self._update_status( ExporterStatus.BEFORE_LEASE_HOOK, "Running beforeLease hooks" ) + # Pass the current session to hook executor for logging + self.hook_executor.main_session = self._current_session await self.hook_executor.execute_pre_lease_hook(hook_ctx) await self._update_status(ExporterStatus.LEASE_READY, "Ready for commands") logger.info("beforeLease hook completed successfully") @@ -311,6 +315,8 @@ async def 
run_before_lease_hook(hook_ctx): # Shield the post-lease hook from cancellation and await it with CancelScope(shield=True): await self._update_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") + # Pass the current session to hook executor for logging + self.hook_executor.main_session = self._current_session await self.hook_executor.execute_post_lease_hook(hook_context) await self._update_status(ExporterStatus.AVAILABLE, "Available for new lease") diff --git a/python/packages/jumpstarter/jumpstarter/exporter/hooks.py b/python/packages/jumpstarter/jumpstarter/exporter/hooks.py index 16318ce3..d71b3e1a 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/hooks.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/hooks.py @@ -7,9 +7,11 @@ from dataclasses import dataclass, field from typing import Callable +from jumpstarter.common import LogSource from jumpstarter.config.env import JMP_DRIVERS_ALLOW, JUMPSTARTER_HOST from jumpstarter.config.exporter import HookConfigV1Alpha1 from jumpstarter.driver import Driver +from jumpstarter.exporter.logging import get_logger from jumpstarter.exporter.session import Session logger = logging.getLogger(__name__) @@ -32,6 +34,7 @@ class HookExecutor: config: HookConfigV1Alpha1 device_factory: Callable[[], Driver] + main_session: Session | None = field(default=None) timeout: int = field(init=False) def __post_init__(self): @@ -63,9 +66,9 @@ async def _create_hook_environment(self, context: HookContext): } ) - yield hook_env + yield session, hook_env - async def _execute_hook(self, command: str, context: HookContext) -> bool: + async def _execute_hook(self, command: str, context: HookContext, log_source: LogSource) -> bool: """Execute a single hook command.""" if not command or not command.strip(): logger.debug("Hook command is empty, skipping") @@ -73,7 +76,7 @@ async def _execute_hook(self, command: str, context: HookContext) -> bool: logger.info("Executing hook: %s", 
command.strip().split("\n")[0][:100]) - async with self._create_hook_environment(context) as hook_env: + async with self._create_hook_environment(context) as (session, hook_env): try: # Execute the hook command using shell process = await asyncio.create_subprocess_shell( @@ -84,6 +87,12 @@ async def _execute_hook(self, command: str, context: HookContext) -> bool: ) try: + # Determine which session to use for logging - prefer main session if available + logging_session = self.main_session if self.main_session is not None else session + + # Create a logger with automatic source registration + hook_logger = get_logger(f"hook.{context.lease_name}", log_source, logging_session) + # Stream output line-by-line for real-time logging output_lines = [] @@ -94,7 +103,8 @@ async def read_output(): break line_decoded = line.decode().rstrip() output_lines.append(line_decoded) - logger.info("[Hook Output] %s", line_decoded) + # Route hook output through the logging system + hook_logger.info(line_decoded) # Run output reading and process waiting concurrently with timeout await asyncio.wait_for(asyncio.gather(read_output(), process.wait()), timeout=self.timeout) @@ -104,8 +114,6 @@ async def read_output(): return True else: logger.error("Hook failed with return code %d", process.returncode) - if output_lines: - logger.error("Hook output: %s", "\n".join(output_lines)) return False except asyncio.TimeoutError: @@ -129,7 +137,7 @@ async def execute_pre_lease_hook(self, context: HookContext) -> bool: return True logger.info("Executing pre-lease hook for lease %s", context.lease_name) - return await self._execute_hook(self.config.pre_lease, context) + return await self._execute_hook(self.config.pre_lease, context, LogSource.BEFORE_LEASE_HOOK) async def execute_post_lease_hook(self, context: HookContext) -> bool: """Execute the post-lease hook.""" @@ -138,4 +146,4 @@ async def execute_post_lease_hook(self, context: HookContext) -> bool: return True logger.info("Executing post-lease hook 
for lease %s", context.lease_name) - return await self._execute_hook(self.config.post_lease, context) + return await self._execute_hook(self.config.post_lease, context, LogSource.AFTER_LEASE_HOOK) diff --git a/python/packages/jumpstarter/jumpstarter/exporter/logging.py b/python/packages/jumpstarter/jumpstarter/exporter/logging.py index 8b73467d..ec8243f0 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/logging.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/logging.py @@ -1,10 +1,16 @@ import logging from collections import deque +from contextlib import contextmanager +from threading import RLock +from typing import TYPE_CHECKING from jumpstarter_protocol import jumpstarter_pb2 from jumpstarter.common import LogSource +if TYPE_CHECKING: + from .session import Session + class LogHandler(logging.Handler): def __init__(self, queue: deque, source: LogSource = LogSource.UNSPECIFIED): @@ -12,16 +18,39 @@ def __init__(self, queue: deque, source: LogSource = LogSource.UNSPECIFIED): self.queue = queue self.listener = None self.source = source # LogSource enum value + self._lock = RLock() + self._child_handlers = {} # Dict of logger_name -> LogSource mappings + + def add_child_handler(self, logger_name: str, source: LogSource): + """Add a child handler that will route logs from a specific logger with a different source.""" + with self._lock: + self._child_handlers[logger_name] = source + + def remove_child_handler(self, logger_name: str): + """Remove a child handler mapping.""" + with self._lock: + self._child_handlers.pop(logger_name, None) + + def get_source_for_record(self, record): + """Determine the appropriate log source for a record.""" + with self._lock: + # Check if this record comes from a logger with a specific source mapping + logger_name = record.name + for mapped_logger, source in self._child_handlers.items(): + if logger_name.startswith(mapped_logger): + return source + return self.source def enqueue(self, record): 
self.queue.append(record) def prepare(self, record): + source = self.get_source_for_record(record) return jumpstarter_pb2.LogStreamResponse( uuid="", severity=record.levelname, message=self.format(record), - source=self.source.value, # Convert to proto value + source=source.value, # Convert to proto value ) def emit(self, record): @@ -29,3 +58,33 @@ def emit(self, record): self.enqueue(self.prepare(record)) except Exception: self.handleError(record) + + @contextmanager + def context_log_source(self, logger_name: str, source: LogSource): + """Context manager to temporarily set a log source for a specific logger.""" + self.add_child_handler(logger_name, source) + try: + yield + finally: + self.remove_child_handler(logger_name) + + +def get_logger(name: str, source: LogSource = LogSource.SYSTEM, session: "Session" = None) -> logging.Logger: + """ + Get a logger with automatic LogSource mapping. + + Args: + name: Logger name (e.g., __name__ or custom name) + source: The LogSource to associate with this logger + session: Optional session to register with immediately + + Returns: + A standard Python logger instance + """ + logger = logging.getLogger(name) + + # If session provided, register the source mapping + if session: + session.add_logger_source(name, source) + + return logger diff --git a/python/packages/jumpstarter/jumpstarter/exporter/session.py b/python/packages/jumpstarter/jumpstarter/exporter/session.py index f9f2340d..13d1a462 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/session.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/session.py @@ -17,8 +17,7 @@ ) from .logging import LogHandler -from jumpstarter.common import ExporterStatus, Metadata, TemporarySocket -from jumpstarter.common.enums import LogSource +from jumpstarter.common import ExporterStatus, LogSource, Metadata, TemporarySocket from jumpstarter.common.streams import StreamRequestMetadata from jumpstarter.driver import Driver from jumpstarter.streams.common import 
forward_stream @@ -74,6 +73,9 @@ def __init__(self, *args, root_device, **kwargs): self._logging_handler = LogHandler(self._logging_queue, LogSource.SYSTEM) self._status_update_event = Event() + # Map all driver logs to DRIVER source + self._logging_handler.add_child_handler("driver.", LogSource.DRIVER) + @asynccontextmanager async def serve_port_async(self, port): server = grpc.aio.server() @@ -153,6 +155,18 @@ def update_status(self, status: int | ExporterStatus, message: str = ""): self._current_status = status self._status_message = message + def add_logger_source(self, logger_name: str, source: LogSource): + """Add a log source mapping for a specific logger.""" + self._logging_handler.add_child_handler(logger_name, source) + + def remove_logger_source(self, logger_name: str): + """Remove a log source mapping for a specific logger.""" + self._logging_handler.remove_child_handler(logger_name) + + def context_log_source(self, logger_name: str, source: LogSource): + """Context manager to temporarily set a log source for a specific logger.""" + return self._logging_handler.context_log_source(logger_name, source) + async def GetStatus(self, request, context): """Get the current exporter status.""" logger.debug("GetStatus() -> %s", self._current_status) From 835ec6e980061142d2ae163c65e151d19d1555c9 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Fri, 31 Oct 2025 11:03:24 -0400 Subject: [PATCH 10/30] Fix circular dependency in logging.py --- .../jumpstarter/exporter/logging.py | 9 ++++---- .../jumpstarter/exporter/logging_protocol.py | 22 +++++++++++++++++++ 2 files changed, 26 insertions(+), 5 deletions(-) create mode 100644 python/packages/jumpstarter/jumpstarter/exporter/logging_protocol.py diff --git a/python/packages/jumpstarter/jumpstarter/exporter/logging.py b/python/packages/jumpstarter/jumpstarter/exporter/logging.py index ec8243f0..6a6e8dad 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/logging.py +++ 
b/python/packages/jumpstarter/jumpstarter/exporter/logging.py @@ -2,15 +2,12 @@ from collections import deque from contextlib import contextmanager from threading import RLock -from typing import TYPE_CHECKING from jumpstarter_protocol import jumpstarter_pb2 +from .logging_protocol import LoggerRegistration from jumpstarter.common import LogSource -if TYPE_CHECKING: - from .session import Session - class LogHandler(logging.Handler): def __init__(self, queue: deque, source: LogSource = LogSource.UNSPECIFIED): @@ -69,7 +66,9 @@ def context_log_source(self, logger_name: str, source: LogSource): self.remove_child_handler(logger_name) -def get_logger(name: str, source: LogSource = LogSource.SYSTEM, session: "Session" = None) -> logging.Logger: +def get_logger( + name: str, source: LogSource = LogSource.SYSTEM, session: LoggerRegistration | None = None +) -> logging.Logger: """ Get a logger with automatic LogSource mapping. diff --git a/python/packages/jumpstarter/jumpstarter/exporter/logging_protocol.py b/python/packages/jumpstarter/jumpstarter/exporter/logging_protocol.py new file mode 100644 index 00000000..04ed885f --- /dev/null +++ b/python/packages/jumpstarter/jumpstarter/exporter/logging_protocol.py @@ -0,0 +1,22 @@ +"""Protocol for logger registration to avoid circular dependencies.""" + +from typing import Protocol + +from jumpstarter.common import LogSource + + +class LoggerRegistration(Protocol): + """Protocol for objects that can register logger sources. + + This protocol defines the interface for objects that can associate + logger names with log sources, enabling proper routing of log messages. + """ + + def add_logger_source(self, logger_name: str, source: LogSource) -> None: + """Register a logger name with its corresponding log source. + + Args: + logger_name: Name of the logger to register + source: The log source category for this logger + """ + ... 
From 592ddbd946bbfad81c976860636c6bcaf69264b1 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Fri, 31 Oct 2025 17:57:45 -0400 Subject: [PATCH 11/30] Update hook behavior to match spec --- .../jumpstarter/config/exporter.py | 27 +- .../jumpstarter/config/exporter_test.py | 35 +-- .../jumpstarter/exporter/exporter.py | 132 ++++++--- .../jumpstarter/jumpstarter/exporter/hooks.py | 252 ++++++++++++----- .../jumpstarter/exporter/hooks_test.py | 256 +++++++++++++++--- 5 files changed, 530 insertions(+), 172 deletions(-) diff --git a/python/packages/jumpstarter/jumpstarter/config/exporter.py b/python/packages/jumpstarter/jumpstarter/config/exporter.py index 893f0449..3e7b88b2 100644 --- a/python/packages/jumpstarter/jumpstarter/config/exporter.py +++ b/python/packages/jumpstarter/jumpstarter/config/exporter.py @@ -18,14 +18,31 @@ from jumpstarter.driver import Driver +class HookInstanceConfigV1Alpha1(BaseModel): + """Configuration for a specific lifecycle hook.""" + + model_config = ConfigDict(populate_by_name=True) + + script: str = Field(alias="script", description="The j script to execute for this hook") + timeout: int = Field(default=120, description="The hook execution timeout in seconds (default: 120s)") + exit_code: int = Field(alias="exitCode", default=0, description="The expected exit code (default: 0)") + on_failure: Literal["pass", "block", "warn"] = Field( + default="pass", + alias="onFailure", + description=( + "Action to take when the expected exit code is not returned: 'pass' continues normally, " + "'block' takes the exporter offline and blocks leases, 'warn' continues and prints a warning" + ), + ) + + class HookConfigV1Alpha1(BaseModel): """Configuration for lifecycle hooks.""" model_config = ConfigDict(populate_by_name=True) - pre_lease: str | None = Field(default=None, alias="preLease") - post_lease: str | None = Field(default=None, alias="postLease") - timeout: int = Field(default=300, description="Hook execution timeout in seconds") + before_lease: 
HookInstanceConfigV1Alpha1 | None = Field(default=None, alias="beforeLease") + after_lease: HookInstanceConfigV1Alpha1 | None = Field(default=None, alias="afterLease") class ExporterConfigV1Alpha1DriverInstanceProxy(BaseModel): @@ -62,7 +79,7 @@ def instantiate(self) -> Driver: description=self.root.description, methods_description=self.root.methods_description, children=children, - **self.root.config + **self.root.config, ) case ExporterConfigV1Alpha1DriverInstanceComposite(): @@ -198,7 +215,7 @@ async def channel_factory(): # Create hook executor if hooks are configured hook_executor = None - if self.hooks.pre_lease or self.hooks.post_lease: + if self.hooks.before_lease or self.hooks.after_lease: from jumpstarter.exporter.hooks import HookExecutor hook_executor = HookExecutor( diff --git a/python/packages/jumpstarter/jumpstarter/config/exporter_test.py b/python/packages/jumpstarter/jumpstarter/config/exporter_test.py index eebce783..68d0e3f4 100644 --- a/python/packages/jumpstarter/jumpstarter/config/exporter_test.py +++ b/python/packages/jumpstarter/jumpstarter/config/exporter_test.py @@ -116,13 +116,16 @@ def test_exporter_config_with_hooks(monkeypatch: pytest.MonkeyPatch, tmp_path: P endpoint: "jumpstarter.my-lab.com:1443" token: "test-token" hooks: - preLease: | - echo "Pre-lease hook for $LEASE_NAME" - j power on - postLease: | - echo "Post-lease hook for $LEASE_NAME" - j power off - timeout: 600 + beforeLease: + script: | + echo "Pre-lease hook for $LEASE_NAME" + j power on + timeout: 600 + afterLease: + script: | + echo "Post-lease hook for $LEASE_NAME" + j power off + timeout: 600 export: power: type: "jumpstarter_driver_power.driver.PduPower" @@ -134,22 +137,20 @@ def test_exporter_config_with_hooks(monkeypatch: pytest.MonkeyPatch, tmp_path: P config = ExporterConfigV1Alpha1.load("test-hooks") - assert config.hooks.pre_lease == 'echo "Pre-lease hook for $LEASE_NAME"\nj power on\n' - assert config.hooks.post_lease == 'echo "Post-lease hook for 
$LEASE_NAME"\nj power off\n' - assert config.hooks.timeout == 600 + assert config.hooks.before_lease.script == 'echo "Pre-lease hook for $LEASE_NAME"\nj power on\n' + assert config.hooks.after_lease.script == 'echo "Post-lease hook for $LEASE_NAME"\nj power off\n' # Test that it round-trips correctly path.unlink() ExporterConfigV1Alpha1.save(config) reloaded_config = ExporterConfigV1Alpha1.load("test-hooks") - assert reloaded_config.hooks.pre_lease == config.hooks.pre_lease - assert reloaded_config.hooks.post_lease == config.hooks.post_lease - assert reloaded_config.hooks.timeout == config.hooks.timeout + assert reloaded_config.hooks.before_lease.script == config.hooks.before_lease.script + assert reloaded_config.hooks.after_lease.script == config.hooks.after_lease.script # Test that the YAML uses camelCase yaml_output = ExporterConfigV1Alpha1.dump_yaml(config) - assert "preLease:" in yaml_output - assert "postLease:" in yaml_output - assert "pre_lease:" not in yaml_output - assert "post_lease:" not in yaml_output + assert "beforeLease:" in yaml_output + assert "afterLease:" in yaml_output + assert "before_lease:" not in yaml_output + assert "after_lease:" not in yaml_output diff --git a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py index c1f370d8..301c57bb 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py @@ -26,7 +26,7 @@ from jumpstarter.common.streams import connect_router_stream from jumpstarter.config.tls import TLSConfigV1Alpha1 from jumpstarter.driver import Driver -from jumpstarter.exporter.hooks import HookContext, HookExecutor +from jumpstarter.exporter.hooks import HookContext, HookExecutionError, HookExecutor from jumpstarter.exporter.session import Session logger = logging.getLogger(__name__) @@ -49,6 +49,7 @@ class Exporter(AsyncContextManagerMixin, Metadata): _pre_lease_ready: Event 
| None = field(init=False, default=None) _current_status: ExporterStatus = field(init=False, default=ExporterStatus.OFFLINE) _current_session: Session | None = field(init=False, default=None) + _session_socket_path: str | None = field(init=False, default=None) def stop(self, wait_for_lease_exit=False, should_unregister=False): """Signal the exporter to stop. @@ -183,13 +184,18 @@ async def listen(retries=5, backoff=3): tg.start_soon(listen) - # Wait for pre-lease hook to complete before processing connections - if self._pre_lease_ready is not None: - logger.info("Waiting for pre-lease hook to complete before accepting connections") - await self._pre_lease_ready.wait() - logger.info("Pre-lease hook completed, now accepting connections") - + # Create session before hooks run async with self.session() as path: + # Store socket path for hook execution + self._session_socket_path = path + + # Wait for before-lease hook to complete before processing connections + if self._pre_lease_ready is not None: + logger.info("Waiting for before-lease hook to complete before accepting connections") + await self._pre_lease_ready.wait() + logger.info("before-lease hook completed, now accepting connections") + + # Process client connections async for request in listen_rx: logger.info("Handling new connection request on lease %s", lease_name) tg.start_soon( @@ -231,19 +237,15 @@ async def status(retries=5, backoff=3): tg.start_soon(status) async for status in status_rx: if self.lease_name != "" and self.lease_name != status.lease_name: - # Post-lease hook for the previous lease + # After-lease hook for the previous lease if self.hook_executor and self._current_client_name: hook_context = HookContext( lease_name=self.lease_name, client_name=self._current_client_name, ) - # Shield the post-lease hook from cancellation and await it + # Shield the after-lease hook from cancellation and await it with CancelScope(shield=True): - await self._update_status(ExporterStatus.AFTER_LEASE_HOOK, 
"Running afterLease hooks") - # Pass the current session to hook executor for logging - self.hook_executor.main_session = self._current_session - await self.hook_executor.execute_post_lease_hook(hook_context) - await self._update_status(ExporterStatus.AVAILABLE, "Available for new lease") + await self.run_after_lease_hook(hook_context) self.lease_name = status.lease_name logger.info("Lease status changed, killing existing connections") @@ -267,37 +269,14 @@ async def status(retries=5, backoff=3): logger.info("Currently leased by %s under %s", status.client_name, status.lease_name) self._current_client_name = status.client_name - # Pre-lease hook when transitioning from unleased to leased + # Before-lease hook when transitioning from unleased to leased if not previous_leased: if self.hook_executor: hook_context = HookContext( lease_name=status.lease_name, client_name=status.client_name, ) - - # Start pre-lease hook asynchronously - async def run_before_lease_hook(hook_ctx): - try: - await self._update_status( - ExporterStatus.BEFORE_LEASE_HOOK, "Running beforeLease hooks" - ) - # Pass the current session to hook executor for logging - self.hook_executor.main_session = self._current_session - await self.hook_executor.execute_pre_lease_hook(hook_ctx) - await self._update_status(ExporterStatus.LEASE_READY, "Ready for commands") - logger.info("beforeLease hook completed successfully") - except Exception as e: - logger.error("beforeLease hook failed: %s", e) - # Still transition to ready even if hook fails - await self._update_status( - ExporterStatus.LEASE_READY, f"Ready (beforeLease hook failed: {e})" - ) - finally: - # Always set the event to unblock connections - if self._pre_lease_ready: - self._pre_lease_ready.set() - - tg.start_soon(run_before_lease_hook, hook_context) + tg.start_soon(self.run_before_lease_hook, self, hook_context) else: # No hook configured, set event immediately await self._update_status(ExporterStatus.LEASE_READY, "Ready for commands") @@ 
-306,18 +285,21 @@ async def run_before_lease_hook(hook_ctx): else: logger.info("Currently not leased") - # Post-lease hook when transitioning from leased to unleased + # After-lease hook when transitioning from leased to unleased if previous_leased and self.hook_executor and self._current_client_name: hook_context = HookContext( lease_name=self.lease_name, client_name=self._current_client_name, ) - # Shield the post-lease hook from cancellation and await it + # Shield the after-lease hook from cancellation and await it with CancelScope(shield=True): await self._update_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") # Pass the current session to hook executor for logging self.hook_executor.main_session = self._current_session - await self.hook_executor.execute_post_lease_hook(hook_context) + # Use session socket if available, otherwise create new session + await self.hook_executor.execute_after_lease_hook( + hook_context, socket_path=self._session_socket_path + ) await self._update_status(ExporterStatus.AVAILABLE, "Available for new lease") self._current_client_name = "" @@ -330,3 +312,69 @@ async def run_before_lease_hook(hook_ctx): self._previous_leased = current_leased self._tg = None + + async def run_before_lease_hook(self, hook_ctx: HookContext): + """ + Execute the before-lease hook for the current exporter session. 
+ + Args: + hook_ctx (HookContext): The current hook execution context + """ + try: + await self._update_status(ExporterStatus.BEFORE_LEASE_HOOK, "Running beforeLease hooks") + # Pass the current session to hook executor for logging + self.hook_executor.main_session = self._current_session + + # Wait for socket path to be available + while self._session_socket_path is None: + await sleep(0.1) + + # Execute hook with main session socket + await self.hook_executor.execute_before_lease_hook(hook_ctx, socket_path=self._session_socket_path) + await self._update_status(ExporterStatus.LEASE_READY, "Ready for commands") + logger.info("beforeLease hook completed successfully") + except HookExecutionError as e: + # Hook failed with on_failure='block' - end lease and set failed status + logger.error("beforeLease hook failed (on_failure=block): %s", e) + await self._update_status( + ExporterStatus.BEFORE_LEASE_HOOK_FAILED, f"beforeLease hook failed (on_failure=block): {e}" + ) + # Note: We don't take the exporter offline for before_lease hook failures + # The lease is simply not ready, and the exporter remains available for future leases + except Exception as e: + # Unexpected error during hook execution + logger.error("beforeLease hook failed with unexpected error: %s", e, exc_info=True) + await self._update_status(ExporterStatus.BEFORE_LEASE_HOOK_FAILED, f"beforeLease hook failed: {e}") + finally: + # Always set the event to unblock connections + if self._pre_lease_ready: + self._pre_lease_ready.set() + + async def run_after_lease_hook(self, hook_ctx: HookContext): + """ + Execute the after-lease hook for the current exporter session. 
+ + Args: + hook_ctx (HookContext): The current hook execution context + """ + try: + await self._update_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") + # Pass the current session to hook executor for logging + self.hook_executor.main_session = self._current_session + # Use session socket if available, otherwise create new session + await self.hook_executor.execute_after_lease_hook(hook_ctx, socket_path=self._session_socket_path) + await self._update_status(ExporterStatus.AVAILABLE, "Available for new lease") + logger.info("afterLease hook completed successfully") + except HookExecutionError as e: + # Hook failed with on_failure='block' - set failed status and shut down exporter + logger.error("afterLease hook failed (on_failure=block): %s", e) + await self._update_status( + ExporterStatus.AFTER_LEASE_HOOK_FAILED, f"afterLease hook failed (on_failure=block): {e}" + ) + # Shut down the exporter after after_lease hook failure with on_failure='block' + logger.error("Shutting down exporter due to afterLease hook failure") + self.stop() + except Exception as e: + # Unexpected error during hook execution + logger.error("afterLease hook failed with unexpected error: %s", e, exc_info=True) + await self._update_status(ExporterStatus.AFTER_LEASE_HOOK_FAILED, f"afterLease hook failed: {e}") diff --git a/python/packages/jumpstarter/jumpstarter/exporter/hooks.py b/python/packages/jumpstarter/jumpstarter/exporter/hooks.py index d71b3e1a..63ca84c3 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/hooks.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/hooks.py @@ -9,7 +9,7 @@ from jumpstarter.common import LogSource from jumpstarter.config.env import JMP_DRIVERS_ALLOW, JUMPSTARTER_HOST -from jumpstarter.config.exporter import HookConfigV1Alpha1 +from jumpstarter.config.exporter import HookConfigV1Alpha1, HookInstanceConfigV1Alpha1 from jumpstarter.driver import Driver from jumpstarter.exporter.logging import get_logger from 
jumpstarter.exporter.session import Session @@ -17,6 +17,12 @@ logger = logging.getLogger(__name__) +class HookExecutionError(Exception): + """Raised when a hook fails and on_failure is set to 'block'.""" + + pass + + @dataclass(kw_only=True) class HookContext: """Context information passed to hooks.""" @@ -35,10 +41,6 @@ class HookExecutor: config: HookConfigV1Alpha1 device_factory: Callable[[], Driver] main_session: Session | None = field(default=None) - timeout: int = field(init=False) - - def __post_init__(self): - self.timeout = self.config.timeout @asynccontextmanager async def _create_hook_environment(self, context: HookContext): @@ -68,82 +70,196 @@ async def _create_hook_environment(self, context: HookContext): yield session, hook_env - async def _execute_hook(self, command: str, context: HookContext, log_source: LogSource) -> bool: - """Execute a single hook command.""" + async def _execute_hook( + self, + hook_config: HookInstanceConfigV1Alpha1, + context: HookContext, + log_source: LogSource, + socket_path: str | None = None, + ) -> bool: + """Execute a single hook command. + + Args: + hook_config: Hook configuration including script, timeout, exit_code, and on_failure + context: Hook context information + log_source: Log source for hook output + socket_path: Optional Unix socket path to reuse existing session. + If provided, hooks will access the main session instead of creating their own. 
+ """ + command = hook_config.script if not command or not command.strip(): logger.debug("Hook command is empty, skipping") return True logger.info("Executing hook: %s", command.strip().split("\n")[0][:100]) - async with self._create_hook_environment(context) as (session, hook_env): + # If socket_path provided, use existing session; otherwise create new one + if socket_path is not None: + # Reuse existing session - create environment without session creation + hook_env = os.environ.copy() + hook_env.update( + { + JUMPSTARTER_HOST: str(socket_path), + JMP_DRIVERS_ALLOW: "UNSAFE", + "LEASE_NAME": context.lease_name, + "CLIENT_NAME": context.client_name, + "LEASE_DURATION": context.lease_duration, + "EXPORTER_NAME": context.exporter_name, + "EXPORTER_NAMESPACE": context.exporter_namespace, + } + ) + + # Use main session for logging (must be available when socket_path is provided) + logging_session = self.main_session + if logging_session is None: + raise ValueError("main_session must be set when reusing socket_path") + + return await self._execute_hook_process(hook_config, context, log_source, hook_env, logging_session) + else: + # Create new session for hook execution (fallback/standalone mode) + async with self._create_hook_environment(context) as (session, hook_env): + # Determine which session to use for logging + logging_session = self.main_session if self.main_session is not None else session + return await self._execute_hook_process(hook_config, context, log_source, hook_env, logging_session) + + async def _execute_hook_process( + self, + hook_config: HookInstanceConfigV1Alpha1, + context: HookContext, + log_source: LogSource, + hook_env: dict, + logging_session: Session, + ) -> bool: + """Execute the hook process with the given environment and logging session.""" + command = hook_config.script + timeout = hook_config.timeout + expected_exit_code = hook_config.exit_code + on_failure = hook_config.on_failure + + try: + # Execute the hook command using shell + 
process = await asyncio.create_subprocess_shell( + command, + env=hook_env, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.STDOUT, + ) + try: - # Execute the hook command using shell - process = await asyncio.create_subprocess_shell( - command, - env=hook_env, - stdout=asyncio.subprocess.PIPE, - stderr=asyncio.subprocess.STDOUT, - ) + # Create a logger with automatic source registration + hook_logger = get_logger(f"hook.{context.lease_name}", log_source, logging_session) - try: - # Determine which session to use for logging - prefer main session if available - logging_session = self.main_session if self.main_session is not None else session - - # Create a logger with automatic source registration - hook_logger = get_logger(f"hook.{context.lease_name}", log_source, logging_session) - - # Stream output line-by-line for real-time logging - output_lines = [] - - async def read_output(): - while True: - line = await process.stdout.readline() - if not line: - break - line_decoded = line.decode().rstrip() - output_lines.append(line_decoded) - # Route hook output through the logging system - hook_logger.info(line_decoded) - - # Run output reading and process waiting concurrently with timeout - await asyncio.wait_for(asyncio.gather(read_output(), process.wait()), timeout=self.timeout) - - if process.returncode == 0: - logger.info("Hook executed successfully") + # Stream output line-by-line for real-time logging + output_lines = [] + + async def read_output(): + while True: + line = await process.stdout.readline() + if not line: + break + line_decoded = line.decode().rstrip() + output_lines.append(line_decoded) + # Route hook output through the logging system + hook_logger.info(line_decoded) + + # Run output reading and process waiting concurrently with timeout + await asyncio.wait_for(asyncio.gather(read_output(), process.wait()), timeout=timeout) + + # Check if exit code matches expected + if process.returncode == expected_exit_code: + logger.info("Hook 
executed successfully with exit code %d", process.returncode) + return True + else: + # Exit code mismatch - handle according to on_failure setting + error_msg = f"Hook failed: expected exit code {expected_exit_code}, got {process.returncode}" + + if on_failure == "pass": + logger.info("%s (on_failure=pass, continuing)", error_msg) + return True + elif on_failure == "warn": + logger.warning("%s (on_failure=warn, continuing)", error_msg) return True - else: - logger.error("Hook failed with return code %d", process.returncode) - return False + else: # on_failure == "block" + logger.error("%s (on_failure=block, raising exception)", error_msg) + raise HookExecutionError(error_msg) + except asyncio.TimeoutError: + error_msg = f"Hook timed out after {timeout} seconds" + logger.error(error_msg) + try: + process.terminate() + await asyncio.wait_for(process.wait(), timeout=5) except asyncio.TimeoutError: - logger.error("Hook timed out after %d seconds", self.timeout) - try: - process.terminate() - await asyncio.wait_for(process.wait(), timeout=5) - except asyncio.TimeoutError: - process.kill() - await process.wait() - return False - - except Exception as e: - logger.error("Error executing hook: %s", e, exc_info=True) - return False - - async def execute_pre_lease_hook(self, context: HookContext) -> bool: - """Execute the pre-lease hook.""" - if not self.config.pre_lease: - logger.debug("No pre-lease hook configured") + process.kill() + await process.wait() + + # Handle timeout according to on_failure setting + if on_failure == "pass": + logger.info("%s (on_failure=pass, continuing)", error_msg) + return True + elif on_failure == "warn": + logger.warning("%s (on_failure=warn, continuing)", error_msg) + return True + else: # on_failure == "block" + raise HookExecutionError(error_msg) + + except HookExecutionError: + # Re-raise HookExecutionError to propagate to exporter + raise + except Exception as e: + error_msg = f"Error executing hook: {e}" + logger.error(error_msg, 
exc_info=True) + + # Handle exception according to on_failure setting + if on_failure == "pass": + logger.info("%s (on_failure=pass, continuing)", error_msg) + return True + elif on_failure == "warn": + logger.warning("%s (on_failure=warn, continuing)", error_msg) + return True + else: # on_failure == "block" + raise HookExecutionError(error_msg) from e + + async def execute_before_lease_hook(self, context: HookContext, socket_path: str | None = None) -> bool: + """Execute the before-lease hook. + + Args: + context: Hook context information + socket_path: Optional Unix socket path to reuse existing session + + Raises: + HookExecutionError: If hook fails and on_failure is set to 'block' + """ + if not self.config.before_lease: + logger.debug("No before-lease hook configured") return True - logger.info("Executing pre-lease hook for lease %s", context.lease_name) - return await self._execute_hook(self.config.pre_lease, context, LogSource.BEFORE_LEASE_HOOK) + logger.info("Executing before-lease hook for lease %s", context.lease_name) + return await self._execute_hook( + self.config.before_lease, + context, + LogSource.BEFORE_LEASE_HOOK, + socket_path, + ) + + async def execute_after_lease_hook(self, context: HookContext, socket_path: str | None = None) -> bool: + """Execute the after-lease hook. 
+ + Args: + context: Hook context information + socket_path: Optional Unix socket path to reuse existing session - async def execute_post_lease_hook(self, context: HookContext) -> bool: - """Execute the post-lease hook.""" - if not self.config.post_lease: - logger.debug("No post-lease hook configured") + Raises: + HookExecutionError: If hook fails and on_failure is set to 'block' + """ + if not self.config.after_lease: + logger.debug("No after-lease hook configured") return True - logger.info("Executing post-lease hook for lease %s", context.lease_name) - return await self._execute_hook(self.config.post_lease, context, LogSource.AFTER_LEASE_HOOK) + logger.info("Executing after-lease hook for lease %s", context.lease_name) + return await self._execute_hook( + self.config.after_lease, + context, + LogSource.AFTER_LEASE_HOOK, + socket_path, + ) diff --git a/python/packages/jumpstarter/jumpstarter/exporter/hooks_test.py b/python/packages/jumpstarter/jumpstarter/exporter/hooks_test.py index 576ca681..bf86d797 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/hooks_test.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/hooks_test.py @@ -4,9 +4,9 @@ import pytest from jumpstarter.config.env import JMP_DRIVERS_ALLOW, JUMPSTARTER_HOST -from jumpstarter.config.exporter import HookConfigV1Alpha1 +from jumpstarter.config.exporter import HookConfigV1Alpha1, HookInstanceConfigV1Alpha1 from jumpstarter.driver import Driver -from jumpstarter.exporter.hooks import HookContext, HookExecutor +from jumpstarter.exporter.hooks import HookContext, HookExecutionError, HookExecutor pytestmark = pytest.mark.anyio @@ -34,9 +34,8 @@ def factory(): @pytest.fixture def hook_config(): return HookConfigV1Alpha1( - pre_lease="echo 'Pre-lease hook executed'", - post_lease="echo 'Post-lease hook executed'", - timeout=10, + before_lease=HookInstanceConfigV1Alpha1(script="echo 'Pre-lease hook executed'", timeout=10), + after_lease=HookInstanceConfigV1Alpha1(script="echo 'Post-lease 
hook executed'", timeout=10), ) @@ -60,7 +59,6 @@ async def test_hook_executor_creation(self, hook_config, mock_device_factory): assert executor.config == hook_config assert executor.device_factory == mock_device_factory - assert executor.timeout == 10 async def test_empty_hook_execution(self, mock_device_factory, hook_context): empty_config = HookConfigV1Alpha1() @@ -70,13 +68,12 @@ async def test_empty_hook_execution(self, mock_device_factory, hook_context): ) # Both hooks should return True for empty/None commands - assert await executor.execute_pre_lease_hook(hook_context) is True - assert await executor.execute_post_lease_hook(hook_context) is True + assert await executor.execute_before_lease_hook(hook_context) is True + assert await executor.execute_after_lease_hook(hook_context) is True async def test_successful_hook_execution(self, mock_device_factory, hook_context): hook_config = HookConfigV1Alpha1( - pre_lease="echo 'Pre-lease hook executed'", - timeout=10, + before_lease=HookInstanceConfigV1Alpha1(script="echo 'Pre-lease hook executed'", timeout=10), ) # Mock the Session and serve_unix_async with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: @@ -103,7 +100,7 @@ async def test_successful_hook_execution(self, mock_device_factory, hook_context device_factory=mock_device_factory, ) - result = await executor.execute_pre_lease_hook(hook_context) + result = await executor.execute_before_lease_hook(hook_context) assert result is True @@ -122,8 +119,9 @@ async def test_successful_hook_execution(self, mock_device_factory, hook_context async def test_failed_hook_execution(self, mock_device_factory, hook_context): failed_config = HookConfigV1Alpha1( - pre_lease="exit 1", # Command that will fail - timeout=10, + before_lease=HookInstanceConfigV1Alpha1( + script="exit 1", timeout=10, on_failure="block" + ), # Command that will fail with on_failure="block" ) with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: @@ -149,14 +147,15 
@@ async def test_failed_hook_execution(self, mock_device_factory, hook_context): device_factory=mock_device_factory, ) - result = await executor.execute_pre_lease_hook(hook_context) - - assert result is False + # Should raise HookExecutionError since on_failure="block" + with pytest.raises(HookExecutionError, match="expected exit code 0, got 1"): + await executor.execute_before_lease_hook(hook_context) async def test_hook_timeout(self, mock_device_factory, hook_context): timeout_config = HookConfigV1Alpha1( - pre_lease="sleep 60", # Command that will timeout - timeout=1, # 1 second timeout + before_lease=HookInstanceConfigV1Alpha1( + script="sleep 60", timeout=1, on_failure="block" + ), # Command that will timeout with on_failure="block" ) with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: @@ -180,15 +179,15 @@ async def test_hook_timeout(self, mock_device_factory, hook_context): device_factory=mock_device_factory, ) - result = await executor.execute_pre_lease_hook(hook_context) + # Should raise HookExecutionError since on_failure="block" + with pytest.raises(HookExecutionError, match="timed out after 1 seconds"): + await executor.execute_before_lease_hook(hook_context) - assert result is False mock_process.terminate.assert_called_once() async def test_hook_environment_variables(self, mock_device_factory, hook_context): hook_config = HookConfigV1Alpha1( - pre_lease="echo 'Pre-lease hook executed'", - timeout=10, + before_lease=HookInstanceConfigV1Alpha1(script="echo 'Pre-lease hook executed'", timeout=10), ) with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: mock_session = Mock() @@ -211,7 +210,7 @@ async def test_hook_environment_variables(self, mock_device_factory, hook_contex device_factory=mock_device_factory, ) - await executor.execute_pre_lease_hook(hook_context) + await executor.execute_before_lease_hook(hook_context) # Check that all expected environment variables are set call_args = mock_subprocess.call_args @@ 
-228,8 +227,9 @@ async def test_hook_environment_variables(self, mock_device_factory, hook_contex async def test_real_time_output_logging(self, mock_device_factory, hook_context): """Test that hook output is logged in real-time at INFO level.""" hook_config = HookConfigV1Alpha1( - pre_lease="echo 'Line 1'; echo 'Line 2'; echo 'Line 3'", - timeout=10, + before_lease=HookInstanceConfigV1Alpha1( + script="echo 'Line 1'; echo 'Line 2'; echo 'Line 3'", timeout=10 + ), ) with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: @@ -260,25 +260,22 @@ async def test_real_time_output_logging(self, mock_device_factory, hook_context) device_factory=mock_device_factory, ) - result = await executor.execute_pre_lease_hook(hook_context) + result = await executor.execute_before_lease_hook(hook_context) assert result is True # Verify that output lines were logged in real-time at INFO level expected_calls = [ - call.info("Executing hook: %s", "echo 'Line 1'; echo 'Line 2'; echo 'Line 3'"), - call.info("[Hook Output] %s", "Line 1"), - call.info("[Hook Output] %s", "Line 2"), - call.info("[Hook Output] %s", "Line 3"), - call.info("Hook executed successfully"), + call("Executing before-lease hook for lease %s", "test-lease-123"), + call("Executing hook: %s", "echo 'Line 1'; echo 'Line 2'; echo 'Line 3'"), + call("Hook executed successfully with exit code %d", 0), ] mock_logger.info.assert_has_calls(expected_calls, any_order=False) async def test_post_lease_hook_execution_on_completion(self, mock_device_factory, hook_context): """Test that post-lease hook executes when called directly.""" hook_config = HookConfigV1Alpha1( - post_lease="echo 'Post-lease cleanup completed'", - timeout=10, + after_lease=HookInstanceConfigV1Alpha1(script="echo 'Post-lease cleanup completed'", timeout=10), ) with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: @@ -298,22 +295,201 @@ async def test_post_lease_hook_execution_on_completion(self, mock_device_factory 
mock_process.wait = AsyncMock(return_value=None) # Mock the logger to capture log calls - with patch("jumpstarter.exporter.hooks.logger") as mock_logger, \ - patch("asyncio.create_subprocess_shell", return_value=mock_process): + with ( + patch("jumpstarter.exporter.hooks.logger") as mock_logger, + patch("asyncio.create_subprocess_shell", return_value=mock_process), + ): executor = HookExecutor( config=hook_config, device_factory=mock_device_factory, ) - result = await executor.execute_post_lease_hook(hook_context) + result = await executor.execute_after_lease_hook(hook_context) assert result is True # Verify that post-lease hook output was logged expected_calls = [ - call.info("Executing post-lease hook for lease %s", "test-lease-123"), - call.info("Executing hook: %s", "echo 'Post-lease cleanup completed'"), - call.info("[Hook Output] %s", "Post-lease cleanup completed"), - call.info("Hook executed successfully"), + call("Executing after-lease hook for lease %s", "test-lease-123"), + call("Executing hook: %s", "echo 'Post-lease cleanup completed'"), + call("Hook executed successfully with exit code %d", 0), ] mock_logger.info.assert_has_calls(expected_calls, any_order=False) + + async def test_hook_exit_code_matching_success(self, mock_device_factory, hook_context): + """Test that hook succeeds when exit code matches expected value.""" + hook_config = HookConfigV1Alpha1( + before_lease=HookInstanceConfigV1Alpha1(script="exit 0", timeout=10, exit_code=0), + ) + + with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") + mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) + + mock_process = AsyncMock() + mock_process.returncode = 0 + mock_process.stdout.readline.side_effect = [b""] + mock_process.wait = 
AsyncMock(return_value=None) + + with patch("asyncio.create_subprocess_shell", return_value=mock_process): + executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) + result = await executor.execute_before_lease_hook(hook_context) + assert result is True + + async def test_hook_exit_code_matching_custom(self, mock_device_factory, hook_context): + """Test that hook succeeds when exit code matches custom expected value.""" + hook_config = HookConfigV1Alpha1( + before_lease=HookInstanceConfigV1Alpha1(script="exit 42", timeout=10, exit_code=42), + ) + + with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") + mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) + + mock_process = AsyncMock() + mock_process.returncode = 42 + mock_process.stdout.readline.side_effect = [b""] + mock_process.wait = AsyncMock(return_value=None) + + with patch("asyncio.create_subprocess_shell", return_value=mock_process): + executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) + result = await executor.execute_before_lease_hook(hook_context) + assert result is True + + async def test_hook_exit_code_mismatch_pass(self, mock_device_factory, hook_context): + """Test that hook succeeds when exit code mismatches but on_failure='pass'.""" + hook_config = HookConfigV1Alpha1( + before_lease=HookInstanceConfigV1Alpha1(script="exit 1", timeout=10, exit_code=0, on_failure="pass"), + ) + + with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") + mock_session.serve_unix_async.return_value.__aexit__ = 
AsyncMock(return_value=None) + + mock_process = AsyncMock() + mock_process.returncode = 1 + mock_process.stdout.readline.side_effect = [b""] + mock_process.wait = AsyncMock(return_value=None) + + with ( + patch("asyncio.create_subprocess_shell", return_value=mock_process), + patch("jumpstarter.exporter.hooks.logger") as mock_logger, + ): + executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) + result = await executor.execute_before_lease_hook(hook_context) + assert result is True + # Verify INFO log was created + mock_logger.info.assert_any_call( + "Hook failed: expected exit code 0, got 1 (on_failure=pass, continuing)" + ) + + async def test_hook_exit_code_mismatch_warn(self, mock_device_factory, hook_context): + """Test that hook succeeds when exit code mismatches but on_failure='warn'.""" + hook_config = HookConfigV1Alpha1( + before_lease=HookInstanceConfigV1Alpha1(script="exit 1", timeout=10, exit_code=0, on_failure="warn"), + ) + + with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") + mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) + + mock_process = AsyncMock() + mock_process.returncode = 1 + mock_process.stdout.readline.side_effect = [b""] + mock_process.wait = AsyncMock(return_value=None) + + with ( + patch("asyncio.create_subprocess_shell", return_value=mock_process), + patch("jumpstarter.exporter.hooks.logger") as mock_logger, + ): + executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) + result = await executor.execute_before_lease_hook(hook_context) + assert result is True + # Verify WARNING log was created + mock_logger.warning.assert_any_call( + "Hook failed: expected exit code 0, got 1 (on_failure=warn, continuing)" + ) + + async def 
test_hook_exit_code_mismatch_block(self, mock_device_factory, hook_context): + """Test that hook raises exception when exit code mismatches and on_failure='block'.""" + hook_config = HookConfigV1Alpha1( + before_lease=HookInstanceConfigV1Alpha1(script="exit 1", timeout=10, exit_code=0, on_failure="block"), + ) + + with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") + mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) + + mock_process = AsyncMock() + mock_process.returncode = 1 + mock_process.stdout.readline.side_effect = [b""] + mock_process.wait = AsyncMock(return_value=None) + + with patch("asyncio.create_subprocess_shell", return_value=mock_process): + executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) + with pytest.raises(HookExecutionError, match="expected exit code 0, got 1"): + await executor.execute_before_lease_hook(hook_context) + + async def test_hook_timeout_with_pass(self, mock_device_factory, hook_context): + """Test that hook succeeds when timeout occurs but on_failure='pass'.""" + hook_config = HookConfigV1Alpha1( + before_lease=HookInstanceConfigV1Alpha1(script="sleep 60", timeout=1, on_failure="pass"), + ) + + with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") + mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) + + mock_process = AsyncMock() + mock_process.terminate = AsyncMock(return_value=None) + mock_process.wait = AsyncMock(return_value=None) + + with ( + patch("asyncio.create_subprocess_shell", return_value=mock_process), 
+ patch("asyncio.wait_for", side_effect=asyncio.TimeoutError()), + patch("jumpstarter.exporter.hooks.logger") as mock_logger, + ): + executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) + result = await executor.execute_before_lease_hook(hook_context) + assert result is True + # Verify INFO log was created + assert any("on_failure=pass, continuing" in str(call) for call in mock_logger.info.call_args_list) + + async def test_hook_timeout_with_warn(self, mock_device_factory, hook_context): + """Test that hook succeeds when timeout occurs but on_failure='warn'.""" + hook_config = HookConfigV1Alpha1( + before_lease=HookInstanceConfigV1Alpha1(script="sleep 60", timeout=1, on_failure="warn"), + ) + + with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") + mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) + + mock_process = AsyncMock() + mock_process.terminate = AsyncMock(return_value=None) + mock_process.wait = AsyncMock(return_value=None) + + with ( + patch("asyncio.create_subprocess_shell", return_value=mock_process), + patch("asyncio.wait_for", side_effect=asyncio.TimeoutError()), + patch("jumpstarter.exporter.hooks.logger") as mock_logger, + ): + executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) + result = await executor.execute_before_lease_hook(hook_context) + assert result is True + # Verify WARNING log was created + assert any("on_failure=warn, continuing" in str(call) for call in mock_logger.warning.call_args_list) From 4d23cb189991ea5eac08c5fe8e457b2d42e8c214 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Mon, 3 Nov 2025 17:07:37 -0500 Subject: [PATCH 12/30] Improve hook error handling --- .../jumpstarter/jumpstarter/exporter/hooks.py | 36 +++++++++---------- 
.../jumpstarter/exporter/hooks_test.py | 8 ++--- 2 files changed, 22 insertions(+), 22 deletions(-) diff --git a/python/packages/jumpstarter/jumpstarter/exporter/hooks.py b/python/packages/jumpstarter/jumpstarter/exporter/hooks.py index 63ca84c3..77803b28 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/hooks.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/hooks.py @@ -76,7 +76,7 @@ async def _execute_hook( context: HookContext, log_source: LogSource, socket_path: str | None = None, - ) -> bool: + ): """Execute a single hook command. Args: @@ -89,7 +89,7 @@ async def _execute_hook( command = hook_config.script if not command or not command.strip(): logger.debug("Hook command is empty, skipping") - return True + return logger.info("Executing hook: %s", command.strip().split("\n")[0][:100]) @@ -129,7 +129,7 @@ async def _execute_hook_process( log_source: LogSource, hook_env: dict, logging_session: Session, - ) -> bool: + ): """Execute the hook process with the given environment and logging session.""" command = hook_config.script timeout = hook_config.timeout @@ -168,22 +168,22 @@ async def read_output(): # Check if exit code matches expected if process.returncode == expected_exit_code: logger.info("Hook executed successfully with exit code %d", process.returncode) - return True + return else: # Exit code mismatch - handle according to on_failure setting error_msg = f"Hook failed: expected exit code {expected_exit_code}, got {process.returncode}" if on_failure == "pass": logger.info("%s (on_failure=pass, continuing)", error_msg) - return True + return elif on_failure == "warn": logger.warning("%s (on_failure=warn, continuing)", error_msg) - return True + return else: # on_failure == "block" logger.error("%s (on_failure=block, raising exception)", error_msg) raise HookExecutionError(error_msg) - except asyncio.TimeoutError: + except asyncio.TimeoutError as e: error_msg = f"Hook timed out after {timeout} seconds" logger.error(error_msg) try: @@ 
-196,12 +196,12 @@ async def read_output(): # Handle timeout according to on_failure setting if on_failure == "pass": logger.info("%s (on_failure=pass, continuing)", error_msg) - return True + return elif on_failure == "warn": logger.warning("%s (on_failure=warn, continuing)", error_msg) - return True + return else: # on_failure == "block" - raise HookExecutionError(error_msg) + raise HookExecutionError(error_msg) from e except HookExecutionError: # Re-raise HookExecutionError to propagate to exporter @@ -213,14 +213,14 @@ async def read_output(): # Handle exception according to on_failure setting if on_failure == "pass": logger.info("%s (on_failure=pass, continuing)", error_msg) - return True + return elif on_failure == "warn": logger.warning("%s (on_failure=warn, continuing)", error_msg) - return True + return else: # on_failure == "block" raise HookExecutionError(error_msg) from e - async def execute_before_lease_hook(self, context: HookContext, socket_path: str | None = None) -> bool: + async def execute_before_lease_hook(self, context: HookContext, socket_path: str | None = None): """Execute the before-lease hook. Args: @@ -232,17 +232,17 @@ async def execute_before_lease_hook(self, context: HookContext, socket_path: str """ if not self.config.before_lease: logger.debug("No before-lease hook configured") - return True + return logger.info("Executing before-lease hook for lease %s", context.lease_name) - return await self._execute_hook( + await self._execute_hook( self.config.before_lease, context, LogSource.BEFORE_LEASE_HOOK, socket_path, ) - async def execute_after_lease_hook(self, context: HookContext, socket_path: str | None = None) -> bool: + async def execute_after_lease_hook(self, context: HookContext, socket_path: str | None = None): """Execute the after-lease hook. 
Args: @@ -254,10 +254,10 @@ async def execute_after_lease_hook(self, context: HookContext, socket_path: str """ if not self.config.after_lease: logger.debug("No after-lease hook configured") - return True + return logger.info("Executing after-lease hook for lease %s", context.lease_name) - return await self._execute_hook( + await self._execute_hook( self.config.after_lease, context, LogSource.AFTER_LEASE_HOOK, diff --git a/python/packages/jumpstarter/jumpstarter/exporter/hooks_test.py b/python/packages/jumpstarter/jumpstarter/exporter/hooks_test.py index bf86d797..0e18d332 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/hooks_test.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/hooks_test.py @@ -384,9 +384,9 @@ async def test_hook_exit_code_mismatch_pass(self, mock_device_factory, hook_cont executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) result = await executor.execute_before_lease_hook(hook_context) assert result is True - # Verify INFO log was created + # Verify INFO log was created (using format string) mock_logger.info.assert_any_call( - "Hook failed: expected exit code 0, got 1 (on_failure=pass, continuing)" + "%s (on_failure=pass, continuing)", "Hook failed: expected exit code 0, got 1" ) async def test_hook_exit_code_mismatch_warn(self, mock_device_factory, hook_context): @@ -413,9 +413,9 @@ async def test_hook_exit_code_mismatch_warn(self, mock_device_factory, hook_cont executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) result = await executor.execute_before_lease_hook(hook_context) assert result is True - # Verify WARNING log was created + # Verify WARNING log was created (using format string) mock_logger.warning.assert_any_call( - "Hook failed: expected exit code 0, got 1 (on_failure=warn, continuing)" + "%s (on_failure=warn, continuing)", "Hook failed: expected exit code 0, got 1" ) async def test_hook_exit_code_mismatch_block(self, mock_device_factory, hook_context): 
From f591dc16969fea62cd50e7880c6a4e06a9888237 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Mon, 24 Nov 2025 13:43:35 -0500 Subject: [PATCH 13/30] Add strongly-typed Protobuf and gRPC codegen and refactor exporter for clarity --- python/buf.gen.yaml | 4 + .../jumpstarter/client/v1/client_pb2.pyi | 318 ++++++++ .../jumpstarter/client/v1/client_pb2_grpc.pyi | 307 +++++++ .../jumpstarter/v1/common_pb2.pyi | 96 +++ .../jumpstarter/v1/common_pb2_grpc.pyi | 20 + .../jumpstarter/v1/jumpstarter_pb2.pyi | 717 +++++++++++++++++ .../jumpstarter/v1/jumpstarter_pb2_grpc.pyi | 752 ++++++++++++++++++ .../jumpstarter/v1/kubernetes_pb2.pyi | 148 ++++ .../jumpstarter/v1/kubernetes_pb2_grpc.pyi | 20 + .../jumpstarter/v1/router_pb2.pyi | 73 ++ .../jumpstarter/v1/router_pb2_grpc.pyi | 96 +++ .../jumpstarter/config/exporter.py | 13 +- .../jumpstarter/jumpstarter/driver/base.py | 2 +- .../jumpstarter/exporter/exporter.py | 523 ++++++++---- .../jumpstarter/jumpstarter/exporter/hooks.py | 115 +-- .../jumpstarter/exporter/hooks_test.py | 214 +---- 16 files changed, 3036 insertions(+), 382 deletions(-) create mode 100644 python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2.pyi create mode 100644 python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2_grpc.pyi create mode 100644 python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2.pyi create mode 100644 python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2_grpc.pyi create mode 100644 python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.pyi create mode 100644 python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.pyi create mode 100644 python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/kubernetes_pb2.pyi create mode 100644 
python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/kubernetes_pb2_grpc.pyi create mode 100644 python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/router_pb2.pyi create mode 100644 python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/router_pb2_grpc.pyi diff --git a/python/buf.gen.yaml b/python/buf.gen.yaml index fce4d534..78467f08 100644 --- a/python/buf.gen.yaml +++ b/python/buf.gen.yaml @@ -6,6 +6,10 @@ plugins: out: ./packages/jumpstarter-protocol/jumpstarter_protocol - remote: buf.build/grpc/python out: ./packages/jumpstarter-protocol/jumpstarter_protocol + - remote: buf.build/community/nipunn1313-mypy:v3.7.0 + out: ./packages/jumpstarter-protocol/jumpstarter_protocol + - remote: buf.build/community/nipunn1313-mypy-grpc:v3.7.0 + out: ./packages/jumpstarter-protocol/jumpstarter_protocol inputs: - git_repo: https://github.com/jumpstarter-dev/jumpstarter-protocol.git branch: main diff --git a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2.pyi b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2.pyi new file mode 100644 index 00000000..500b1379 --- /dev/null +++ b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2.pyi @@ -0,0 +1,318 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file +Copyright 2024 The Jumpstarter Authors +(-- api-linter: core::0215::foreign-type-reference=disabled +(-- api-linter: core::0192::has-comments=disabled +(-- api-linter: core::0191::java-package=disabled +(-- api-linter: core::0191::java-outer-classname=disabled +(-- api-linter: core::0191::java-multiple-files=disabled +""" + +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.duration_pb2 +import google.protobuf.field_mask_pb2 +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import jumpstarter.v1.common_pb2 +import jumpstarter.v1.kubernetes_pb2 +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class Exporter(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class LabelsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + LABELS_FIELD_NUMBER: builtins.int + ONLINE_FIELD_NUMBER: builtins.int + STATUS_FIELD_NUMBER: builtins.int + name: builtins.str + online: builtins.bool + status: jumpstarter.v1.common_pb2.ExporterStatus.ValueType + @property + def labels(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... 
+ def __init__( + self, + *, + name: builtins.str = ..., + labels: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + online: builtins.bool = ..., + status: jumpstarter.v1.common_pb2.ExporterStatus.ValueType = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["labels", b"labels", "name", b"name", "online", b"online", "status", b"status"]) -> None: ... + +Global___Exporter: typing_extensions.TypeAlias = Exporter + +@typing.final +class Lease(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + SELECTOR_FIELD_NUMBER: builtins.int + DURATION_FIELD_NUMBER: builtins.int + EFFECTIVE_DURATION_FIELD_NUMBER: builtins.int + BEGIN_TIME_FIELD_NUMBER: builtins.int + EFFECTIVE_BEGIN_TIME_FIELD_NUMBER: builtins.int + END_TIME_FIELD_NUMBER: builtins.int + EFFECTIVE_END_TIME_FIELD_NUMBER: builtins.int + CLIENT_FIELD_NUMBER: builtins.int + EXPORTER_FIELD_NUMBER: builtins.int + CONDITIONS_FIELD_NUMBER: builtins.int + name: builtins.str + selector: builtins.str + client: builtins.str + exporter: builtins.str + @property + def duration(self) -> google.protobuf.duration_pb2.Duration: ... + @property + def effective_duration(self) -> google.protobuf.duration_pb2.Duration: ... + @property + def begin_time(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def effective_begin_time(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def end_time(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def effective_end_time(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def conditions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[jumpstarter.v1.kubernetes_pb2.Condition]: ... 
+ def __init__( + self, + *, + name: builtins.str = ..., + selector: builtins.str = ..., + duration: google.protobuf.duration_pb2.Duration | None = ..., + effective_duration: google.protobuf.duration_pb2.Duration | None = ..., + begin_time: google.protobuf.timestamp_pb2.Timestamp | None = ..., + effective_begin_time: google.protobuf.timestamp_pb2.Timestamp | None = ..., + end_time: google.protobuf.timestamp_pb2.Timestamp | None = ..., + effective_end_time: google.protobuf.timestamp_pb2.Timestamp | None = ..., + client: builtins.str | None = ..., + exporter: builtins.str | None = ..., + conditions: collections.abc.Iterable[jumpstarter.v1.kubernetes_pb2.Condition] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_begin_time", b"_begin_time", "_client", b"_client", "_duration", b"_duration", "_effective_begin_time", b"_effective_begin_time", "_effective_end_time", b"_effective_end_time", "_end_time", b"_end_time", "_exporter", b"_exporter", "begin_time", b"begin_time", "client", b"client", "duration", b"duration", "effective_begin_time", b"effective_begin_time", "effective_duration", b"effective_duration", "effective_end_time", b"effective_end_time", "end_time", b"end_time", "exporter", b"exporter"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_begin_time", b"_begin_time", "_client", b"_client", "_duration", b"_duration", "_effective_begin_time", b"_effective_begin_time", "_effective_end_time", b"_effective_end_time", "_end_time", b"_end_time", "_exporter", b"_exporter", "begin_time", b"begin_time", "client", b"client", "conditions", b"conditions", "duration", b"duration", "effective_begin_time", b"effective_begin_time", "effective_duration", b"effective_duration", "effective_end_time", b"effective_end_time", "end_time", b"end_time", "exporter", b"exporter", "name", b"name", "selector", b"selector"]) -> None: ... 
+ @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_begin_time", b"_begin_time"]) -> typing.Literal["begin_time"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_client", b"_client"]) -> typing.Literal["client"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_duration", b"_duration"]) -> typing.Literal["duration"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_effective_begin_time", b"_effective_begin_time"]) -> typing.Literal["effective_begin_time"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_effective_end_time", b"_effective_end_time"]) -> typing.Literal["effective_end_time"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_end_time", b"_end_time"]) -> typing.Literal["end_time"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_exporter", b"_exporter"]) -> typing.Literal["exporter"] | None: ... + +Global___Lease: typing_extensions.TypeAlias = Lease + +@typing.final +class GetExporterRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + name: builtins.str + def __init__( + self, + *, + name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["name", b"name"]) -> None: ... 
+ +Global___GetExporterRequest: typing_extensions.TypeAlias = GetExporterRequest + +@typing.final +class ListExportersRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PARENT_FIELD_NUMBER: builtins.int + PAGE_SIZE_FIELD_NUMBER: builtins.int + PAGE_TOKEN_FIELD_NUMBER: builtins.int + FILTER_FIELD_NUMBER: builtins.int + parent: builtins.str + page_size: builtins.int + page_token: builtins.str + filter: builtins.str + def __init__( + self, + *, + parent: builtins.str = ..., + page_size: builtins.int = ..., + page_token: builtins.str = ..., + filter: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["filter", b"filter", "page_size", b"page_size", "page_token", b"page_token", "parent", b"parent"]) -> None: ... + +Global___ListExportersRequest: typing_extensions.TypeAlias = ListExportersRequest + +@typing.final +class ListExportersResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + EXPORTERS_FIELD_NUMBER: builtins.int + NEXT_PAGE_TOKEN_FIELD_NUMBER: builtins.int + next_page_token: builtins.str + @property + def exporters(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[Global___Exporter]: ... + def __init__( + self, + *, + exporters: collections.abc.Iterable[Global___Exporter] | None = ..., + next_page_token: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["exporters", b"exporters", "next_page_token", b"next_page_token"]) -> None: ... + +Global___ListExportersResponse: typing_extensions.TypeAlias = ListExportersResponse + +@typing.final +class GetLeaseRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + name: builtins.str + def __init__( + self, + *, + name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["name", b"name"]) -> None: ... 
+ +Global___GetLeaseRequest: typing_extensions.TypeAlias = GetLeaseRequest + +@typing.final +class ListLeasesRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PARENT_FIELD_NUMBER: builtins.int + PAGE_SIZE_FIELD_NUMBER: builtins.int + PAGE_TOKEN_FIELD_NUMBER: builtins.int + FILTER_FIELD_NUMBER: builtins.int + ONLY_ACTIVE_FIELD_NUMBER: builtins.int + parent: builtins.str + page_size: builtins.int + page_token: builtins.str + filter: builtins.str + only_active: builtins.bool + def __init__( + self, + *, + parent: builtins.str = ..., + page_size: builtins.int = ..., + page_token: builtins.str = ..., + filter: builtins.str = ..., + only_active: builtins.bool | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_only_active", b"_only_active", "only_active", b"only_active"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_only_active", b"_only_active", "filter", b"filter", "only_active", b"only_active", "page_size", b"page_size", "page_token", b"page_token", "parent", b"parent"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["_only_active", b"_only_active"]) -> typing.Literal["only_active"] | None: ... + +Global___ListLeasesRequest: typing_extensions.TypeAlias = ListLeasesRequest + +@typing.final +class ListLeasesResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + LEASES_FIELD_NUMBER: builtins.int + NEXT_PAGE_TOKEN_FIELD_NUMBER: builtins.int + next_page_token: builtins.str + @property + def leases(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[Global___Lease]: ... + def __init__( + self, + *, + leases: collections.abc.Iterable[Global___Lease] | None = ..., + next_page_token: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["leases", b"leases", "next_page_token", b"next_page_token"]) -> None: ... 
+ +Global___ListLeasesResponse: typing_extensions.TypeAlias = ListLeasesResponse + +@typing.final +class CreateLeaseRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PARENT_FIELD_NUMBER: builtins.int + LEASE_ID_FIELD_NUMBER: builtins.int + LEASE_FIELD_NUMBER: builtins.int + parent: builtins.str + lease_id: builtins.str + @property + def lease(self) -> Global___Lease: ... + def __init__( + self, + *, + parent: builtins.str = ..., + lease_id: builtins.str = ..., + lease: Global___Lease | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["lease", b"lease"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["lease", b"lease", "lease_id", b"lease_id", "parent", b"parent"]) -> None: ... + +Global___CreateLeaseRequest: typing_extensions.TypeAlias = CreateLeaseRequest + +@typing.final +class UpdateLeaseRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + LEASE_FIELD_NUMBER: builtins.int + UPDATE_MASK_FIELD_NUMBER: builtins.int + @property + def lease(self) -> Global___Lease: ... + @property + def update_mask(self) -> google.protobuf.field_mask_pb2.FieldMask: ... + def __init__( + self, + *, + lease: Global___Lease | None = ..., + update_mask: google.protobuf.field_mask_pb2.FieldMask | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["lease", b"lease", "update_mask", b"update_mask"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["lease", b"lease", "update_mask", b"update_mask"]) -> None: ... + +Global___UpdateLeaseRequest: typing_extensions.TypeAlias = UpdateLeaseRequest + +@typing.final +class DeleteLeaseRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + name: builtins.str + def __init__( + self, + *, + name: builtins.str = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing.Literal["name", b"name"]) -> None: ... + +Global___DeleteLeaseRequest: typing_extensions.TypeAlias = DeleteLeaseRequest diff --git a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2_grpc.pyi b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2_grpc.pyi new file mode 100644 index 00000000..a5aa7937 --- /dev/null +++ b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2_grpc.pyi @@ -0,0 +1,307 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2024 The Jumpstarter Authors +(-- api-linter: core::0215::foreign-type-reference=disabled +(-- api-linter: core::0192::has-comments=disabled +(-- api-linter: core::0191::java-package=disabled +(-- api-linter: core::0191::java-outer-classname=disabled +(-- api-linter: core::0191::java-multiple-files=disabled +""" + +import abc +import collections.abc +import google.protobuf.empty_pb2 +import grpc +import grpc.aio +import jumpstarter.client.v1.client_pb2 +import sys +import typing + +if sys.version_info >= (3, 13): + import typing as typing_extensions +else: + import typing_extensions + +_T = typing.TypeVar("_T") + +class _MaybeAsyncIterator(collections.abc.AsyncIterator[_T], collections.abc.Iterator[_T], metaclass=abc.ABCMeta): ... + +class _ServicerContext(grpc.ServicerContext, grpc.aio.ServicerContext): # type: ignore[misc, type-arg] + ... 
+ +GRPC_GENERATED_VERSION: str +GRPC_VERSION: str +_ClientServiceGetExporterType = typing_extensions.TypeVar( + '_ClientServiceGetExporterType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetExporterRequest, + jumpstarter.client.v1.client_pb2.Exporter, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetExporterRequest, + jumpstarter.client.v1.client_pb2.Exporter, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetExporterRequest, + jumpstarter.client.v1.client_pb2.Exporter, + ], +) + +_ClientServiceListExportersType = typing_extensions.TypeVar( + '_ClientServiceListExportersType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListExportersRequest, + jumpstarter.client.v1.client_pb2.ListExportersResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListExportersRequest, + jumpstarter.client.v1.client_pb2.ListExportersResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListExportersRequest, + jumpstarter.client.v1.client_pb2.ListExportersResponse, + ], +) + +_ClientServiceGetLeaseType = typing_extensions.TypeVar( + '_ClientServiceGetLeaseType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], +) + +_ClientServiceListLeasesType = typing_extensions.TypeVar( + '_ClientServiceListLeasesType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListLeasesRequest, + jumpstarter.client.v1.client_pb2.ListLeasesResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListLeasesRequest, + 
jumpstarter.client.v1.client_pb2.ListLeasesResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListLeasesRequest, + jumpstarter.client.v1.client_pb2.ListLeasesResponse, + ], +) + +_ClientServiceCreateLeaseType = typing_extensions.TypeVar( + '_ClientServiceCreateLeaseType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.CreateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.CreateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.CreateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], +) + +_ClientServiceUpdateLeaseType = typing_extensions.TypeVar( + '_ClientServiceUpdateLeaseType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.UpdateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.UpdateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.UpdateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], +) + +_ClientServiceDeleteLeaseType = typing_extensions.TypeVar( + '_ClientServiceDeleteLeaseType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.DeleteLeaseRequest, + google.protobuf.empty_pb2.Empty, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.DeleteLeaseRequest, + google.protobuf.empty_pb2.Empty, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.DeleteLeaseRequest, + google.protobuf.empty_pb2.Empty, + ], +) + +class ClientServiceStub(typing.Generic[_ClientServiceGetExporterType, _ClientServiceListExportersType, _ClientServiceGetLeaseType, _ClientServiceListLeasesType, _ClientServiceCreateLeaseType, _ClientServiceUpdateLeaseType, _ClientServiceDeleteLeaseType]): + 
@typing.overload + def __init__(self: ClientServiceStub[ + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetExporterRequest, + jumpstarter.client.v1.client_pb2.Exporter, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListExportersRequest, + jumpstarter.client.v1.client_pb2.ListExportersResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListLeasesRequest, + jumpstarter.client.v1.client_pb2.ListLeasesResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.CreateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.UpdateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.DeleteLeaseRequest, + google.protobuf.empty_pb2.Empty, + ], + ], channel: grpc.Channel) -> None: ... 
+ + @typing.overload + def __init__(self: ClientServiceStub[ + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetExporterRequest, + jumpstarter.client.v1.client_pb2.Exporter, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListExportersRequest, + jumpstarter.client.v1.client_pb2.ListExportersResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListLeasesRequest, + jumpstarter.client.v1.client_pb2.ListLeasesResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.CreateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.UpdateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.DeleteLeaseRequest, + google.protobuf.empty_pb2.Empty, + ], + ], channel: grpc.aio.Channel) -> None: ... 
+ + GetExporter: _ClientServiceGetExporterType + + ListExporters: _ClientServiceListExportersType + + GetLease: _ClientServiceGetLeaseType + + ListLeases: _ClientServiceListLeasesType + + CreateLease: _ClientServiceCreateLeaseType + + UpdateLease: _ClientServiceUpdateLeaseType + + DeleteLease: _ClientServiceDeleteLeaseType + +ClientServiceAsyncStub: typing_extensions.TypeAlias = ClientServiceStub[ + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetExporterRequest, + jumpstarter.client.v1.client_pb2.Exporter, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListExportersRequest, + jumpstarter.client.v1.client_pb2.ListExportersResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListLeasesRequest, + jumpstarter.client.v1.client_pb2.ListLeasesResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.CreateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.UpdateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.DeleteLeaseRequest, + google.protobuf.empty_pb2.Empty, + ], +] + +class ClientServiceServicer(metaclass=abc.ABCMeta): + @abc.abstractmethod + def GetExporter( + self, + request: jumpstarter.client.v1.client_pb2.GetExporterRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.client.v1.client_pb2.Exporter, collections.abc.Awaitable[jumpstarter.client.v1.client_pb2.Exporter]]: ... 
+ + @abc.abstractmethod + def ListExporters( + self, + request: jumpstarter.client.v1.client_pb2.ListExportersRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.client.v1.client_pb2.ListExportersResponse, collections.abc.Awaitable[jumpstarter.client.v1.client_pb2.ListExportersResponse]]: ... + + @abc.abstractmethod + def GetLease( + self, + request: jumpstarter.client.v1.client_pb2.GetLeaseRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.client.v1.client_pb2.Lease, collections.abc.Awaitable[jumpstarter.client.v1.client_pb2.Lease]]: ... + + @abc.abstractmethod + def ListLeases( + self, + request: jumpstarter.client.v1.client_pb2.ListLeasesRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.client.v1.client_pb2.ListLeasesResponse, collections.abc.Awaitable[jumpstarter.client.v1.client_pb2.ListLeasesResponse]]: ... + + @abc.abstractmethod + def CreateLease( + self, + request: jumpstarter.client.v1.client_pb2.CreateLeaseRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.client.v1.client_pb2.Lease, collections.abc.Awaitable[jumpstarter.client.v1.client_pb2.Lease]]: ... + + @abc.abstractmethod + def UpdateLease( + self, + request: jumpstarter.client.v1.client_pb2.UpdateLeaseRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.client.v1.client_pb2.Lease, collections.abc.Awaitable[jumpstarter.client.v1.client_pb2.Lease]]: ... + + @abc.abstractmethod + def DeleteLease( + self, + request: jumpstarter.client.v1.client_pb2.DeleteLeaseRequest, + context: _ServicerContext, + ) -> typing.Union[google.protobuf.empty_pb2.Empty, collections.abc.Awaitable[google.protobuf.empty_pb2.Empty]]: ... + +def add_ClientServiceServicer_to_server(servicer: ClientServiceServicer, server: typing.Union[grpc.Server, grpc.aio.Server]) -> None: ... 
diff --git a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2.pyi b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2.pyi new file mode 100644 index 00000000..f433f1db --- /dev/null +++ b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2.pyi @@ -0,0 +1,96 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2024 The Jumpstarter Authors""" + +import builtins +import google.protobuf.descriptor +import google.protobuf.internal.enum_type_wrapper +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class _ExporterStatus: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _ExporterStatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_ExporterStatus.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + EXPORTER_STATUS_UNSPECIFIED: _ExporterStatus.ValueType # 0 + """Unspecified exporter status""" + EXPORTER_STATUS_OFFLINE: _ExporterStatus.ValueType # 1 + """Exporter is offline""" + EXPORTER_STATUS_AVAILABLE: _ExporterStatus.ValueType # 2 + """Exporter is available to be leased""" + EXPORTER_STATUS_BEFORE_LEASE_HOOK: _ExporterStatus.ValueType # 3 + """Exporter is executing before lease hook(s)""" + EXPORTER_STATUS_LEASE_READY: _ExporterStatus.ValueType # 4 + """Exporter is leased and ready to accept commands""" + EXPORTER_STATUS_AFTER_LEASE_HOOK: _ExporterStatus.ValueType # 5 + """Exporter is executing after lease hook(s)""" + EXPORTER_STATUS_BEFORE_LEASE_HOOK_FAILED: _ExporterStatus.ValueType # 6 + """Exporter before lease hook failed""" + EXPORTER_STATUS_AFTER_LEASE_HOOK_FAILED: _ExporterStatus.ValueType # 7 + """Exporter after lease hook failed""" + +class 
ExporterStatus(_ExporterStatus, metaclass=_ExporterStatusEnumTypeWrapper): + """Shared types used across multiple Jumpstarter services + + Exporter status information + """ + +EXPORTER_STATUS_UNSPECIFIED: ExporterStatus.ValueType # 0 +"""Unspecified exporter status""" +EXPORTER_STATUS_OFFLINE: ExporterStatus.ValueType # 1 +"""Exporter is offline""" +EXPORTER_STATUS_AVAILABLE: ExporterStatus.ValueType # 2 +"""Exporter is available to be leased""" +EXPORTER_STATUS_BEFORE_LEASE_HOOK: ExporterStatus.ValueType # 3 +"""Exporter is executing before lease hook(s)""" +EXPORTER_STATUS_LEASE_READY: ExporterStatus.ValueType # 4 +"""Exporter is leased and ready to accept commands""" +EXPORTER_STATUS_AFTER_LEASE_HOOK: ExporterStatus.ValueType # 5 +"""Exporter is executing after lease hook(s)""" +EXPORTER_STATUS_BEFORE_LEASE_HOOK_FAILED: ExporterStatus.ValueType # 6 +"""Exporter before lease hook failed""" +EXPORTER_STATUS_AFTER_LEASE_HOOK_FAILED: ExporterStatus.ValueType # 7 +"""Exporter after lease hook failed""" +Global___ExporterStatus: typing_extensions.TypeAlias = ExporterStatus + +class _LogSource: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _LogSourceEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_LogSource.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + LOG_SOURCE_UNSPECIFIED: _LogSource.ValueType # 0 + """Unspecified log source""" + LOG_SOURCE_DRIVER: _LogSource.ValueType # 1 + """Driver/device logs""" + LOG_SOURCE_BEFORE_LEASE_HOOK: _LogSource.ValueType # 2 + """beforeLease hook execution logs""" + LOG_SOURCE_AFTER_LEASE_HOOK: _LogSource.ValueType # 3 + """afterLease hook execution logs""" + LOG_SOURCE_SYSTEM: _LogSource.ValueType # 4 + """System/exporter logs""" + +class LogSource(_LogSource, metaclass=_LogSourceEnumTypeWrapper): + """Source of log stream messages""" + +LOG_SOURCE_UNSPECIFIED: LogSource.ValueType # 0 +"""Unspecified log 
source""" +LOG_SOURCE_DRIVER: LogSource.ValueType # 1 +"""Driver/device logs""" +LOG_SOURCE_BEFORE_LEASE_HOOK: LogSource.ValueType # 2 +"""beforeLease hook execution logs""" +LOG_SOURCE_AFTER_LEASE_HOOK: LogSource.ValueType # 3 +"""afterLease hook execution logs""" +LOG_SOURCE_SYSTEM: LogSource.ValueType # 4 +"""System/exporter logs""" +Global___LogSource: typing_extensions.TypeAlias = LogSource diff --git a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2_grpc.pyi b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2_grpc.pyi new file mode 100644 index 00000000..6aac9706 --- /dev/null +++ b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2_grpc.pyi @@ -0,0 +1,20 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2024 The Jumpstarter Authors""" + +import abc +import collections.abc +import grpc +import grpc.aio +import typing + +_T = typing.TypeVar("_T") + +class _MaybeAsyncIterator(collections.abc.AsyncIterator[_T], collections.abc.Iterator[_T], metaclass=abc.ABCMeta): ... + +class _ServicerContext(grpc.ServicerContext, grpc.aio.ServicerContext): # type: ignore[misc, type-arg] + ... + +GRPC_GENERATED_VERSION: str +GRPC_VERSION: str diff --git a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.pyi b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.pyi new file mode 100644 index 00000000..762c46c6 --- /dev/null +++ b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.pyi @@ -0,0 +1,717 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file +Copyright 2024 The Jumpstarter Authors""" + +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.duration_pb2 +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.struct_pb2 +import google.protobuf.timestamp_pb2 +import jumpstarter.v1.common_pb2 +import jumpstarter.v1.kubernetes_pb2 +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class RegisterRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class LabelsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + + LABELS_FIELD_NUMBER: builtins.int + REPORTS_FIELD_NUMBER: builtins.int + @property + def labels(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """additional context: + - token/authentication mechanism + """ + + @property + def reports(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[Global___DriverInstanceReport]: + """standard labels: + jumpstarter.dev/hostname= + jumpstarter.dev/name= + """ + + def __init__( + self, + *, + labels: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + reports: collections.abc.Iterable[Global___DriverInstanceReport] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["labels", b"labels", "reports", b"reports"]) -> None: ... 
+ +Global___RegisterRequest: typing_extensions.TypeAlias = RegisterRequest + +@typing.final +class DriverInstanceReport(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class LabelsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + + @typing.final + class MethodsDescriptionEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + + UUID_FIELD_NUMBER: builtins.int + PARENT_UUID_FIELD_NUMBER: builtins.int + LABELS_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + METHODS_DESCRIPTION_FIELD_NUMBER: builtins.int + uuid: builtins.str + """a unique id within the exporter""" + parent_uuid: builtins.str + """optional, if device has a parent device""" + description: builtins.str + """optional custom driver description for CLI""" + @property + def labels(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... 
+ @property + def methods_description(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """method name -> help text for CLI""" + + def __init__( + self, + *, + uuid: builtins.str = ..., + parent_uuid: builtins.str | None = ..., + labels: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + description: builtins.str | None = ..., + methods_description: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_description", b"_description", "_parent_uuid", b"_parent_uuid", "description", b"description", "parent_uuid", b"parent_uuid"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_description", b"_description", "_parent_uuid", b"_parent_uuid", "description", b"description", "labels", b"labels", "methods_description", b"methods_description", "parent_uuid", b"parent_uuid", "uuid", b"uuid"]) -> None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_description", b"_description"]) -> typing.Literal["description"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_parent_uuid", b"_parent_uuid"]) -> typing.Literal["parent_uuid"] | None: ... + +Global___DriverInstanceReport: typing_extensions.TypeAlias = DriverInstanceReport + +@typing.final +class RegisterResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + UUID_FIELD_NUMBER: builtins.int + uuid: builtins.str + def __init__( + self, + *, + uuid: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["uuid", b"uuid"]) -> None: ... 
+ +Global___RegisterResponse: typing_extensions.TypeAlias = RegisterResponse + +@typing.final +class UnregisterRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + REASON_FIELD_NUMBER: builtins.int + reason: builtins.str + def __init__( + self, + *, + reason: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["reason", b"reason"]) -> None: ... + +Global___UnregisterRequest: typing_extensions.TypeAlias = UnregisterRequest + +@typing.final +class UnregisterResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +Global___UnregisterResponse: typing_extensions.TypeAlias = UnregisterResponse + +@typing.final +class ListenRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + LEASE_NAME_FIELD_NUMBER: builtins.int + lease_name: builtins.str + def __init__( + self, + *, + lease_name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["lease_name", b"lease_name"]) -> None: ... + +Global___ListenRequest: typing_extensions.TypeAlias = ListenRequest + +@typing.final +class ListenResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ROUTER_ENDPOINT_FIELD_NUMBER: builtins.int + ROUTER_TOKEN_FIELD_NUMBER: builtins.int + router_endpoint: builtins.str + router_token: builtins.str + def __init__( + self, + *, + router_endpoint: builtins.str = ..., + router_token: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["router_endpoint", b"router_endpoint", "router_token", b"router_token"]) -> None: ... + +Global___ListenResponse: typing_extensions.TypeAlias = ListenResponse + +@typing.final +class StatusRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... 
+ +Global___StatusRequest: typing_extensions.TypeAlias = StatusRequest + +@typing.final +class StatusResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + LEASED_FIELD_NUMBER: builtins.int + LEASE_NAME_FIELD_NUMBER: builtins.int + CLIENT_NAME_FIELD_NUMBER: builtins.int + leased: builtins.bool + lease_name: builtins.str + client_name: builtins.str + def __init__( + self, + *, + leased: builtins.bool = ..., + lease_name: builtins.str | None = ..., + client_name: builtins.str | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_client_name", b"_client_name", "_lease_name", b"_lease_name", "client_name", b"client_name", "lease_name", b"lease_name"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_client_name", b"_client_name", "_lease_name", b"_lease_name", "client_name", b"client_name", "lease_name", b"lease_name", "leased", b"leased"]) -> None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_client_name", b"_client_name"]) -> typing.Literal["client_name"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_lease_name", b"_lease_name"]) -> typing.Literal["lease_name"] | None: ... + +Global___StatusResponse: typing_extensions.TypeAlias = StatusResponse + +@typing.final +class DialRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + LEASE_NAME_FIELD_NUMBER: builtins.int + lease_name: builtins.str + def __init__( + self, + *, + lease_name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["lease_name", b"lease_name"]) -> None: ... 
+ +Global___DialRequest: typing_extensions.TypeAlias = DialRequest + +@typing.final +class DialResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ROUTER_ENDPOINT_FIELD_NUMBER: builtins.int + ROUTER_TOKEN_FIELD_NUMBER: builtins.int + router_endpoint: builtins.str + router_token: builtins.str + def __init__( + self, + *, + router_endpoint: builtins.str = ..., + router_token: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["router_endpoint", b"router_endpoint", "router_token", b"router_token"]) -> None: ... + +Global___DialResponse: typing_extensions.TypeAlias = DialResponse + +@typing.final +class AuditStreamRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + EXPORTER_UUID_FIELD_NUMBER: builtins.int + DRIVER_INSTANCE_UUID_FIELD_NUMBER: builtins.int + SEVERITY_FIELD_NUMBER: builtins.int + MESSAGE_FIELD_NUMBER: builtins.int + exporter_uuid: builtins.str + """additional context: + - token/authentication mechanism + """ + driver_instance_uuid: builtins.str + severity: builtins.str + message: builtins.str + def __init__( + self, + *, + exporter_uuid: builtins.str = ..., + driver_instance_uuid: builtins.str = ..., + severity: builtins.str = ..., + message: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["driver_instance_uuid", b"driver_instance_uuid", "exporter_uuid", b"exporter_uuid", "message", b"message", "severity", b"severity"]) -> None: ... 
+ +Global___AuditStreamRequest: typing_extensions.TypeAlias = AuditStreamRequest + +@typing.final +class ReportStatusRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + STATUS_FIELD_NUMBER: builtins.int + MESSAGE_FIELD_NUMBER: builtins.int + status: jumpstarter.v1.common_pb2.ExporterStatus.ValueType + message: builtins.str + """Optional human-readable status message""" + def __init__( + self, + *, + status: jumpstarter.v1.common_pb2.ExporterStatus.ValueType = ..., + message: builtins.str | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_message", b"_message", "message", b"message"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_message", b"_message", "message", b"message", "status", b"status"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["_message", b"_message"]) -> typing.Literal["message"] | None: ... + +Global___ReportStatusRequest: typing_extensions.TypeAlias = ReportStatusRequest + +@typing.final +class ReportStatusResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +Global___ReportStatusResponse: typing_extensions.TypeAlias = ReportStatusResponse + +@typing.final +class GetReportResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class LabelsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... 
+ + UUID_FIELD_NUMBER: builtins.int + LABELS_FIELD_NUMBER: builtins.int + REPORTS_FIELD_NUMBER: builtins.int + ALTERNATIVE_ENDPOINTS_FIELD_NUMBER: builtins.int + uuid: builtins.str + @property + def labels(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + @property + def reports(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[Global___DriverInstanceReport]: + """standard labels: + jumpstarter.dev/hostname= + jumpstarter.dev/name= + """ + + @property + def alternative_endpoints(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[Global___Endpoint]: ... + def __init__( + self, + *, + uuid: builtins.str = ..., + labels: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + reports: collections.abc.Iterable[Global___DriverInstanceReport] | None = ..., + alternative_endpoints: collections.abc.Iterable[Global___Endpoint] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["alternative_endpoints", b"alternative_endpoints", "labels", b"labels", "reports", b"reports", "uuid", b"uuid"]) -> None: ... + +Global___GetReportResponse: typing_extensions.TypeAlias = GetReportResponse + +@typing.final +class Endpoint(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ENDPOINT_FIELD_NUMBER: builtins.int + CERTIFICATE_FIELD_NUMBER: builtins.int + CLIENT_CERTIFICATE_FIELD_NUMBER: builtins.int + CLIENT_PRIVATE_KEY_FIELD_NUMBER: builtins.int + endpoint: builtins.str + certificate: builtins.str + client_certificate: builtins.str + client_private_key: builtins.str + def __init__( + self, + *, + endpoint: builtins.str = ..., + certificate: builtins.str = ..., + client_certificate: builtins.str = ..., + client_private_key: builtins.str = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing.Literal["certificate", b"certificate", "client_certificate", b"client_certificate", "client_private_key", b"client_private_key", "endpoint", b"endpoint"]) -> None: ... + +Global___Endpoint: typing_extensions.TypeAlias = Endpoint + +@typing.final +class DriverCallRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + UUID_FIELD_NUMBER: builtins.int + METHOD_FIELD_NUMBER: builtins.int + ARGS_FIELD_NUMBER: builtins.int + uuid: builtins.str + method: builtins.str + @property + def args(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.struct_pb2.Value]: ... + def __init__( + self, + *, + uuid: builtins.str = ..., + method: builtins.str = ..., + args: collections.abc.Iterable[google.protobuf.struct_pb2.Value] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["args", b"args", "method", b"method", "uuid", b"uuid"]) -> None: ... + +Global___DriverCallRequest: typing_extensions.TypeAlias = DriverCallRequest + +@typing.final +class DriverCallResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + UUID_FIELD_NUMBER: builtins.int + RESULT_FIELD_NUMBER: builtins.int + uuid: builtins.str + @property + def result(self) -> google.protobuf.struct_pb2.Value: ... + def __init__( + self, + *, + uuid: builtins.str = ..., + result: google.protobuf.struct_pb2.Value | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["result", b"result"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["result", b"result", "uuid", b"uuid"]) -> None: ... 
+ +Global___DriverCallResponse: typing_extensions.TypeAlias = DriverCallResponse + +@typing.final +class StreamingDriverCallRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + UUID_FIELD_NUMBER: builtins.int + METHOD_FIELD_NUMBER: builtins.int + ARGS_FIELD_NUMBER: builtins.int + uuid: builtins.str + method: builtins.str + @property + def args(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.struct_pb2.Value]: ... + def __init__( + self, + *, + uuid: builtins.str = ..., + method: builtins.str = ..., + args: collections.abc.Iterable[google.protobuf.struct_pb2.Value] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["args", b"args", "method", b"method", "uuid", b"uuid"]) -> None: ... + +Global___StreamingDriverCallRequest: typing_extensions.TypeAlias = StreamingDriverCallRequest + +@typing.final +class StreamingDriverCallResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + UUID_FIELD_NUMBER: builtins.int + RESULT_FIELD_NUMBER: builtins.int + uuid: builtins.str + @property + def result(self) -> google.protobuf.struct_pb2.Value: ... + def __init__( + self, + *, + uuid: builtins.str = ..., + result: google.protobuf.struct_pb2.Value | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["result", b"result"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["result", b"result", "uuid", b"uuid"]) -> None: ... 
+ +Global___StreamingDriverCallResponse: typing_extensions.TypeAlias = StreamingDriverCallResponse + +@typing.final +class LogStreamResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + UUID_FIELD_NUMBER: builtins.int + SEVERITY_FIELD_NUMBER: builtins.int + MESSAGE_FIELD_NUMBER: builtins.int + SOURCE_FIELD_NUMBER: builtins.int + uuid: builtins.str + severity: builtins.str + message: builtins.str + source: jumpstarter.v1.common_pb2.LogSource.ValueType + """New optional field""" + def __init__( + self, + *, + uuid: builtins.str = ..., + severity: builtins.str = ..., + message: builtins.str = ..., + source: jumpstarter.v1.common_pb2.LogSource.ValueType | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_source", b"_source", "source", b"source"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_source", b"_source", "message", b"message", "severity", b"severity", "source", b"source", "uuid", b"uuid"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["_source", b"_source"]) -> typing.Literal["source"] | None: ... + +Global___LogStreamResponse: typing_extensions.TypeAlias = LogStreamResponse + +@typing.final +class ResetRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +Global___ResetRequest: typing_extensions.TypeAlias = ResetRequest + +@typing.final +class ResetResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +Global___ResetResponse: typing_extensions.TypeAlias = ResetResponse + +@typing.final +class GetLeaseRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + name: builtins.str + def __init__( + self, + *, + name: builtins.str = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing.Literal["name", b"name"]) -> None: ... + +Global___GetLeaseRequest: typing_extensions.TypeAlias = GetLeaseRequest + +@typing.final +class GetLeaseResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DURATION_FIELD_NUMBER: builtins.int + SELECTOR_FIELD_NUMBER: builtins.int + BEGIN_TIME_FIELD_NUMBER: builtins.int + END_TIME_FIELD_NUMBER: builtins.int + EXPORTER_UUID_FIELD_NUMBER: builtins.int + CONDITIONS_FIELD_NUMBER: builtins.int + exporter_uuid: builtins.str + @property + def duration(self) -> google.protobuf.duration_pb2.Duration: ... + @property + def selector(self) -> jumpstarter.v1.kubernetes_pb2.LabelSelector: ... + @property + def begin_time(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def end_time(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def conditions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[jumpstarter.v1.kubernetes_pb2.Condition]: ... + def __init__( + self, + *, + duration: google.protobuf.duration_pb2.Duration | None = ..., + selector: jumpstarter.v1.kubernetes_pb2.LabelSelector | None = ..., + begin_time: google.protobuf.timestamp_pb2.Timestamp | None = ..., + end_time: google.protobuf.timestamp_pb2.Timestamp | None = ..., + exporter_uuid: builtins.str | None = ..., + conditions: collections.abc.Iterable[jumpstarter.v1.kubernetes_pb2.Condition] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_begin_time", b"_begin_time", "_end_time", b"_end_time", "_exporter_uuid", b"_exporter_uuid", "begin_time", b"begin_time", "duration", b"duration", "end_time", b"end_time", "exporter_uuid", b"exporter_uuid", "selector", b"selector"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing.Literal["_begin_time", b"_begin_time", "_end_time", b"_end_time", "_exporter_uuid", b"_exporter_uuid", "begin_time", b"begin_time", "conditions", b"conditions", "duration", b"duration", "end_time", b"end_time", "exporter_uuid", b"exporter_uuid", "selector", b"selector"]) -> None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_begin_time", b"_begin_time"]) -> typing.Literal["begin_time"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_end_time", b"_end_time"]) -> typing.Literal["end_time"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_exporter_uuid", b"_exporter_uuid"]) -> typing.Literal["exporter_uuid"] | None: ... + +Global___GetLeaseResponse: typing_extensions.TypeAlias = GetLeaseResponse + +@typing.final +class RequestLeaseRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DURATION_FIELD_NUMBER: builtins.int + SELECTOR_FIELD_NUMBER: builtins.int + @property + def duration(self) -> google.protobuf.duration_pb2.Duration: ... + @property + def selector(self) -> jumpstarter.v1.kubernetes_pb2.LabelSelector: ... + def __init__( + self, + *, + duration: google.protobuf.duration_pb2.Duration | None = ..., + selector: jumpstarter.v1.kubernetes_pb2.LabelSelector | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["duration", b"duration", "selector", b"selector"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["duration", b"duration", "selector", b"selector"]) -> None: ... + +Global___RequestLeaseRequest: typing_extensions.TypeAlias = RequestLeaseRequest + +@typing.final +class RequestLeaseResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + name: builtins.str + def __init__( + self, + *, + name: builtins.str = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing.Literal["name", b"name"]) -> None: ... + +Global___RequestLeaseResponse: typing_extensions.TypeAlias = RequestLeaseResponse + +@typing.final +class ReleaseLeaseRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + name: builtins.str + def __init__( + self, + *, + name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["name", b"name"]) -> None: ... + +Global___ReleaseLeaseRequest: typing_extensions.TypeAlias = ReleaseLeaseRequest + +@typing.final +class ReleaseLeaseResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +Global___ReleaseLeaseResponse: typing_extensions.TypeAlias = ReleaseLeaseResponse + +@typing.final +class ListLeasesRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +Global___ListLeasesRequest: typing_extensions.TypeAlias = ListLeasesRequest + +@typing.final +class ListLeasesResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAMES_FIELD_NUMBER: builtins.int + @property + def names(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + def __init__( + self, + *, + names: collections.abc.Iterable[builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["names", b"names"]) -> None: ... + +Global___ListLeasesResponse: typing_extensions.TypeAlias = ListLeasesResponse + +@typing.final +class GetStatusRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... 
+ +Global___GetStatusRequest: typing_extensions.TypeAlias = GetStatusRequest + +@typing.final +class GetStatusResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + STATUS_FIELD_NUMBER: builtins.int + MESSAGE_FIELD_NUMBER: builtins.int + status: jumpstarter.v1.common_pb2.ExporterStatus.ValueType + message: builtins.str + def __init__( + self, + *, + status: jumpstarter.v1.common_pb2.ExporterStatus.ValueType = ..., + message: builtins.str | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_message", b"_message", "message", b"message"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_message", b"_message", "message", b"message", "status", b"status"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["_message", b"_message"]) -> typing.Literal["message"] | None: ... + +Global___GetStatusResponse: typing_extensions.TypeAlias = GetStatusResponse diff --git a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.pyi b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.pyi new file mode 100644 index 00000000..78c9ffbb --- /dev/null +++ b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.pyi @@ -0,0 +1,752 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2024 The Jumpstarter Authors""" + +import abc +import collections.abc +import google.protobuf.empty_pb2 +import grpc +import grpc.aio +import jumpstarter.v1.jumpstarter_pb2 +import sys +import typing + +if sys.version_info >= (3, 13): + import typing as typing_extensions +else: + import typing_extensions + +_T = typing.TypeVar("_T") + +class _MaybeAsyncIterator(collections.abc.AsyncIterator[_T], collections.abc.Iterator[_T], metaclass=abc.ABCMeta): ... 
+ +class _ServicerContext(grpc.ServicerContext, grpc.aio.ServicerContext): # type: ignore[misc, type-arg] + ... + +GRPC_GENERATED_VERSION: str +GRPC_VERSION: str +_ControllerServiceRegisterType = typing_extensions.TypeVar( + '_ControllerServiceRegisterType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RegisterRequest, + jumpstarter.v1.jumpstarter_pb2.RegisterResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RegisterRequest, + jumpstarter.v1.jumpstarter_pb2.RegisterResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RegisterRequest, + jumpstarter.v1.jumpstarter_pb2.RegisterResponse, + ], +) + +_ControllerServiceUnregisterType = typing_extensions.TypeVar( + '_ControllerServiceUnregisterType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.UnregisterRequest, + jumpstarter.v1.jumpstarter_pb2.UnregisterResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.UnregisterRequest, + jumpstarter.v1.jumpstarter_pb2.UnregisterResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.UnregisterRequest, + jumpstarter.v1.jumpstarter_pb2.UnregisterResponse, + ], +) + +_ControllerServiceReportStatusType = typing_extensions.TypeVar( + '_ControllerServiceReportStatusType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReportStatusRequest, + jumpstarter.v1.jumpstarter_pb2.ReportStatusResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReportStatusRequest, + jumpstarter.v1.jumpstarter_pb2.ReportStatusResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReportStatusRequest, + jumpstarter.v1.jumpstarter_pb2.ReportStatusResponse, + ], +) + +_ControllerServiceListenType = typing_extensions.TypeVar( + '_ControllerServiceListenType', + grpc.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListenRequest, + 
jumpstarter.v1.jumpstarter_pb2.ListenResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListenRequest, + jumpstarter.v1.jumpstarter_pb2.ListenResponse, + ], + default=grpc.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListenRequest, + jumpstarter.v1.jumpstarter_pb2.ListenResponse, + ], +) + +_ControllerServiceStatusType = typing_extensions.TypeVar( + '_ControllerServiceStatusType', + grpc.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StatusRequest, + jumpstarter.v1.jumpstarter_pb2.StatusResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StatusRequest, + jumpstarter.v1.jumpstarter_pb2.StatusResponse, + ], + default=grpc.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StatusRequest, + jumpstarter.v1.jumpstarter_pb2.StatusResponse, + ], +) + +_ControllerServiceDialType = typing_extensions.TypeVar( + '_ControllerServiceDialType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DialRequest, + jumpstarter.v1.jumpstarter_pb2.DialResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DialRequest, + jumpstarter.v1.jumpstarter_pb2.DialResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DialRequest, + jumpstarter.v1.jumpstarter_pb2.DialResponse, + ], +) + +_ControllerServiceAuditStreamType = typing_extensions.TypeVar( + '_ControllerServiceAuditStreamType', + grpc.StreamUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.AuditStreamRequest, + google.protobuf.empty_pb2.Empty, + ], + grpc.aio.StreamUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.AuditStreamRequest, + google.protobuf.empty_pb2.Empty, + ], + default=grpc.StreamUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.AuditStreamRequest, + google.protobuf.empty_pb2.Empty, + ], +) + +_ControllerServiceGetLeaseType = typing_extensions.TypeVar( + '_ControllerServiceGetLeaseType', + grpc.UnaryUnaryMultiCallable[ + 
jumpstarter.v1.jumpstarter_pb2.GetLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.GetLeaseResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.GetLeaseResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.GetLeaseResponse, + ], +) + +_ControllerServiceRequestLeaseType = typing_extensions.TypeVar( + '_ControllerServiceRequestLeaseType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RequestLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.RequestLeaseResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RequestLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.RequestLeaseResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RequestLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.RequestLeaseResponse, + ], +) + +_ControllerServiceReleaseLeaseType = typing_extensions.TypeVar( + '_ControllerServiceReleaseLeaseType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseResponse, + ], +) + +_ControllerServiceListLeasesType = typing_extensions.TypeVar( + '_ControllerServiceListLeasesType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListLeasesRequest, + jumpstarter.v1.jumpstarter_pb2.ListLeasesResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListLeasesRequest, + jumpstarter.v1.jumpstarter_pb2.ListLeasesResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListLeasesRequest, + 
jumpstarter.v1.jumpstarter_pb2.ListLeasesResponse, + ], +) + +class ControllerServiceStub(typing.Generic[_ControllerServiceRegisterType, _ControllerServiceUnregisterType, _ControllerServiceReportStatusType, _ControllerServiceListenType, _ControllerServiceStatusType, _ControllerServiceDialType, _ControllerServiceAuditStreamType, _ControllerServiceGetLeaseType, _ControllerServiceRequestLeaseType, _ControllerServiceReleaseLeaseType, _ControllerServiceListLeasesType]): + """A service where a exporter can connect to make itself available""" + + @typing.overload + def __init__(self: ControllerServiceStub[ + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RegisterRequest, + jumpstarter.v1.jumpstarter_pb2.RegisterResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.UnregisterRequest, + jumpstarter.v1.jumpstarter_pb2.UnregisterResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReportStatusRequest, + jumpstarter.v1.jumpstarter_pb2.ReportStatusResponse, + ], + grpc.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListenRequest, + jumpstarter.v1.jumpstarter_pb2.ListenResponse, + ], + grpc.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StatusRequest, + jumpstarter.v1.jumpstarter_pb2.StatusResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DialRequest, + jumpstarter.v1.jumpstarter_pb2.DialResponse, + ], + grpc.StreamUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.AuditStreamRequest, + google.protobuf.empty_pb2.Empty, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.GetLeaseResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RequestLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.RequestLeaseResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseResponse, + ], + 
grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListLeasesRequest, + jumpstarter.v1.jumpstarter_pb2.ListLeasesResponse, + ], + ], channel: grpc.Channel) -> None: ... + + @typing.overload + def __init__(self: ControllerServiceStub[ + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RegisterRequest, + jumpstarter.v1.jumpstarter_pb2.RegisterResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.UnregisterRequest, + jumpstarter.v1.jumpstarter_pb2.UnregisterResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReportStatusRequest, + jumpstarter.v1.jumpstarter_pb2.ReportStatusResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListenRequest, + jumpstarter.v1.jumpstarter_pb2.ListenResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StatusRequest, + jumpstarter.v1.jumpstarter_pb2.StatusResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DialRequest, + jumpstarter.v1.jumpstarter_pb2.DialResponse, + ], + grpc.aio.StreamUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.AuditStreamRequest, + google.protobuf.empty_pb2.Empty, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.GetLeaseResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RequestLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.RequestLeaseResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListLeasesRequest, + jumpstarter.v1.jumpstarter_pb2.ListLeasesResponse, + ], + ], channel: grpc.aio.Channel) -> None: ... 
+ + Register: _ControllerServiceRegisterType + """Exporter registration""" + + Unregister: _ControllerServiceUnregisterType + """Exporter disconnection + Disconnecting with bye will invalidate any existing router tokens + we will eventually have a mechanism to tell the router this token + has been invalidated + """ + + ReportStatus: _ControllerServiceReportStatusType + """Exporter status report + Allows exporters to report their own status to the controller + """ + + Listen: _ControllerServiceListenType + """Exporter listening + Returns stream tokens for accepting incoming client connections + """ + + Status: _ControllerServiceStatusType + """Exporter status + Returns lease status for the exporter + """ + + Dial: _ControllerServiceDialType + """Client connecting + Returns stream token for connecting to the desired exporter + Leases are checked before token issuance + """ + + AuditStream: _ControllerServiceAuditStreamType + """Audit events from the exporters + audit events are used to track the exporter's activity + """ + + GetLease: _ControllerServiceGetLeaseType + """Get Lease""" + + RequestLease: _ControllerServiceRequestLeaseType + """Request Lease""" + + ReleaseLease: _ControllerServiceReleaseLeaseType + """Release Lease""" + + ListLeases: _ControllerServiceListLeasesType + """List Leases""" + +ControllerServiceAsyncStub: typing_extensions.TypeAlias = ControllerServiceStub[ + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RegisterRequest, + jumpstarter.v1.jumpstarter_pb2.RegisterResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.UnregisterRequest, + jumpstarter.v1.jumpstarter_pb2.UnregisterResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReportStatusRequest, + jumpstarter.v1.jumpstarter_pb2.ReportStatusResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListenRequest, + jumpstarter.v1.jumpstarter_pb2.ListenResponse, + ], + 
grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StatusRequest, + jumpstarter.v1.jumpstarter_pb2.StatusResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DialRequest, + jumpstarter.v1.jumpstarter_pb2.DialResponse, + ], + grpc.aio.StreamUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.AuditStreamRequest, + google.protobuf.empty_pb2.Empty, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.GetLeaseResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RequestLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.RequestLeaseResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListLeasesRequest, + jumpstarter.v1.jumpstarter_pb2.ListLeasesResponse, + ], +] + +class ControllerServiceServicer(metaclass=abc.ABCMeta): + """A service where a exporter can connect to make itself available""" + + @abc.abstractmethod + def Register( + self, + request: jumpstarter.v1.jumpstarter_pb2.RegisterRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.RegisterResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.RegisterResponse]]: + """Exporter registration""" + + @abc.abstractmethod + def Unregister( + self, + request: jumpstarter.v1.jumpstarter_pb2.UnregisterRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.UnregisterResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.UnregisterResponse]]: + """Exporter disconnection + Disconnecting with bye will invalidate any existing router tokens + we will eventually have a mechanism to tell the router this token + has been invalidated + """ + + @abc.abstractmethod + def ReportStatus( + self, + request: 
jumpstarter.v1.jumpstarter_pb2.ReportStatusRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.ReportStatusResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.ReportStatusResponse]]: + """Exporter status report + Allows exporters to report their own status to the controller + """ + + @abc.abstractmethod + def Listen( + self, + request: jumpstarter.v1.jumpstarter_pb2.ListenRequest, + context: _ServicerContext, + ) -> typing.Union[collections.abc.Iterator[jumpstarter.v1.jumpstarter_pb2.ListenResponse], collections.abc.AsyncIterator[jumpstarter.v1.jumpstarter_pb2.ListenResponse]]: + """Exporter listening + Returns stream tokens for accepting incoming client connections + """ + + @abc.abstractmethod + def Status( + self, + request: jumpstarter.v1.jumpstarter_pb2.StatusRequest, + context: _ServicerContext, + ) -> typing.Union[collections.abc.Iterator[jumpstarter.v1.jumpstarter_pb2.StatusResponse], collections.abc.AsyncIterator[jumpstarter.v1.jumpstarter_pb2.StatusResponse]]: + """Exporter status + Returns lease status for the exporter + """ + + @abc.abstractmethod + def Dial( + self, + request: jumpstarter.v1.jumpstarter_pb2.DialRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.DialResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.DialResponse]]: + """Client connecting + Returns stream token for connecting to the desired exporter + Leases are checked before token issuance + """ + + @abc.abstractmethod + def AuditStream( + self, + request_iterator: _MaybeAsyncIterator[jumpstarter.v1.jumpstarter_pb2.AuditStreamRequest], + context: _ServicerContext, + ) -> typing.Union[google.protobuf.empty_pb2.Empty, collections.abc.Awaitable[google.protobuf.empty_pb2.Empty]]: + """Audit events from the exporters + audit events are used to track the exporter's activity + """ + + @abc.abstractmethod + def GetLease( + self, + request: jumpstarter.v1.jumpstarter_pb2.GetLeaseRequest, + 
context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.GetLeaseResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.GetLeaseResponse]]: + """Get Lease""" + + @abc.abstractmethod + def RequestLease( + self, + request: jumpstarter.v1.jumpstarter_pb2.RequestLeaseRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.RequestLeaseResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.RequestLeaseResponse]]: + """Request Lease""" + + @abc.abstractmethod + def ReleaseLease( + self, + request: jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseResponse]]: + """Release Lease""" + + @abc.abstractmethod + def ListLeases( + self, + request: jumpstarter.v1.jumpstarter_pb2.ListLeasesRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.ListLeasesResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.ListLeasesResponse]]: + """List Leases""" + +def add_ControllerServiceServicer_to_server(servicer: ControllerServiceServicer, server: typing.Union[grpc.Server, grpc.aio.Server]) -> None: ... 
+ +_ExporterServiceGetReportType = typing_extensions.TypeVar( + '_ExporterServiceGetReportType', + grpc.UnaryUnaryMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.GetReportResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.GetReportResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.GetReportResponse, + ], +) + +_ExporterServiceDriverCallType = typing_extensions.TypeVar( + '_ExporterServiceDriverCallType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.DriverCallResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.DriverCallResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.DriverCallResponse, + ], +) + +_ExporterServiceStreamingDriverCallType = typing_extensions.TypeVar( + '_ExporterServiceStreamingDriverCallType', + grpc.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallResponse, + ], + default=grpc.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallResponse, + ], +) + +_ExporterServiceLogStreamType = typing_extensions.TypeVar( + '_ExporterServiceLogStreamType', + grpc.UnaryStreamMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.LogStreamResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.LogStreamResponse, + ], + 
default=grpc.UnaryStreamMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.LogStreamResponse, + ], +) + +_ExporterServiceResetType = typing_extensions.TypeVar( + '_ExporterServiceResetType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ResetRequest, + jumpstarter.v1.jumpstarter_pb2.ResetResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ResetRequest, + jumpstarter.v1.jumpstarter_pb2.ResetResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ResetRequest, + jumpstarter.v1.jumpstarter_pb2.ResetResponse, + ], +) + +_ExporterServiceGetStatusType = typing_extensions.TypeVar( + '_ExporterServiceGetStatusType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetStatusRequest, + jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetStatusRequest, + jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetStatusRequest, + jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, + ], +) + +class ExporterServiceStub(typing.Generic[_ExporterServiceGetReportType, _ExporterServiceDriverCallType, _ExporterServiceStreamingDriverCallType, _ExporterServiceLogStreamType, _ExporterServiceResetType, _ExporterServiceGetStatusType]): + """A service a exporter can share locally to be used without a server + Channel/Call credentials are used to authenticate the client, and routing to the right exporter + """ + + @typing.overload + def __init__(self: ExporterServiceStub[ + grpc.UnaryUnaryMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.GetReportResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.DriverCallResponse, + ], + grpc.UnaryStreamMultiCallable[ + 
jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallResponse, + ], + grpc.UnaryStreamMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.LogStreamResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ResetRequest, + jumpstarter.v1.jumpstarter_pb2.ResetResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetStatusRequest, + jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, + ], + ], channel: grpc.Channel) -> None: ... + + @typing.overload + def __init__(self: ExporterServiceStub[ + grpc.aio.UnaryUnaryMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.GetReportResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.DriverCallResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.LogStreamResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ResetRequest, + jumpstarter.v1.jumpstarter_pb2.ResetResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetStatusRequest, + jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, + ], + ], channel: grpc.aio.Channel) -> None: ... 
+ + GetReport: _ExporterServiceGetReportType + """Exporter registration""" + + DriverCall: _ExporterServiceDriverCallType + + StreamingDriverCall: _ExporterServiceStreamingDriverCallType + + LogStream: _ExporterServiceLogStreamType + + Reset: _ExporterServiceResetType + + GetStatus: _ExporterServiceGetStatusType + +ExporterServiceAsyncStub: typing_extensions.TypeAlias = ExporterServiceStub[ + grpc.aio.UnaryUnaryMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.GetReportResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.DriverCallResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.LogStreamResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ResetRequest, + jumpstarter.v1.jumpstarter_pb2.ResetResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetStatusRequest, + jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, + ], +] + +class ExporterServiceServicer(metaclass=abc.ABCMeta): + """A service a exporter can share locally to be used without a server + Channel/Call credentials are used to authenticate the client, and routing to the right exporter + """ + + @abc.abstractmethod + def GetReport( + self, + request: google.protobuf.empty_pb2.Empty, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.GetReportResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.GetReportResponse]]: + """Exporter registration""" + + @abc.abstractmethod + def DriverCall( + self, + request: jumpstarter.v1.jumpstarter_pb2.DriverCallRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.DriverCallResponse, 
collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.DriverCallResponse]]: ... + + @abc.abstractmethod + def StreamingDriverCall( + self, + request: jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallRequest, + context: _ServicerContext, + ) -> typing.Union[collections.abc.Iterator[jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallResponse], collections.abc.AsyncIterator[jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallResponse]]: ... + + @abc.abstractmethod + def LogStream( + self, + request: google.protobuf.empty_pb2.Empty, + context: _ServicerContext, + ) -> typing.Union[collections.abc.Iterator[jumpstarter.v1.jumpstarter_pb2.LogStreamResponse], collections.abc.AsyncIterator[jumpstarter.v1.jumpstarter_pb2.LogStreamResponse]]: ... + + @abc.abstractmethod + def Reset( + self, + request: jumpstarter.v1.jumpstarter_pb2.ResetRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.ResetResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.ResetResponse]]: ... + + @abc.abstractmethod + def GetStatus( + self, + request: jumpstarter.v1.jumpstarter_pb2.GetStatusRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.GetStatusResponse]]: ... + +def add_ExporterServiceServicer_to_server(servicer: ExporterServiceServicer, server: typing.Union[grpc.Server, grpc.aio.Server]) -> None: ... diff --git a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/kubernetes_pb2.pyi b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/kubernetes_pb2.pyi new file mode 100644 index 00000000..e07fb625 --- /dev/null +++ b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/kubernetes_pb2.pyi @@ -0,0 +1,148 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file +Copyright 2024 The Jumpstarter Authors""" + +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class LabelSelectorRequirement(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + OPERATOR_FIELD_NUMBER: builtins.int + VALUES_FIELD_NUMBER: builtins.int + key: builtins.str + operator: builtins.str + @property + def values(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + def __init__( + self, + *, + key: builtins.str = ..., + operator: builtins.str = ..., + values: collections.abc.Iterable[builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "operator", b"operator", "values", b"values"]) -> None: ... + +Global___LabelSelectorRequirement: typing_extensions.TypeAlias = LabelSelectorRequirement + +@typing.final +class LabelSelector(google.protobuf.message.Message): + """Reference: https://kubernetes.io/docs/reference/kubernetes-api/common-definitions/label-selector/""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class MatchLabelsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... 
+ + MATCH_EXPRESSIONS_FIELD_NUMBER: builtins.int + MATCH_LABELS_FIELD_NUMBER: builtins.int + @property + def match_expressions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[Global___LabelSelectorRequirement]: ... + @property + def match_labels(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + def __init__( + self, + *, + match_expressions: collections.abc.Iterable[Global___LabelSelectorRequirement] | None = ..., + match_labels: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["match_expressions", b"match_expressions", "match_labels", b"match_labels"]) -> None: ... + +Global___LabelSelector: typing_extensions.TypeAlias = LabelSelector + +@typing.final +class Time(google.protobuf.message.Message): + """Reference: https://github.com/kubernetes/kubernetes/blob/v1.31.1/staging/src/k8s.io/apimachinery/pkg/apis/meta/v1/generated.proto""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SECONDS_FIELD_NUMBER: builtins.int + NANOS_FIELD_NUMBER: builtins.int + seconds: builtins.int + nanos: builtins.int + def __init__( + self, + *, + seconds: builtins.int | None = ..., + nanos: builtins.int | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_nanos", b"_nanos", "_seconds", b"_seconds", "nanos", b"nanos", "seconds", b"seconds"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_nanos", b"_nanos", "_seconds", b"_seconds", "nanos", b"nanos", "seconds", b"seconds"]) -> None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_nanos", b"_nanos"]) -> typing.Literal["nanos"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_seconds", b"_seconds"]) -> typing.Literal["seconds"] | None: ... 
+ +Global___Time: typing_extensions.TypeAlias = Time + +@typing.final +class Condition(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TYPE_FIELD_NUMBER: builtins.int + STATUS_FIELD_NUMBER: builtins.int + OBSERVEDGENERATION_FIELD_NUMBER: builtins.int + LASTTRANSITIONTIME_FIELD_NUMBER: builtins.int + REASON_FIELD_NUMBER: builtins.int + MESSAGE_FIELD_NUMBER: builtins.int + type: builtins.str + status: builtins.str + observedGeneration: builtins.int + reason: builtins.str + message: builtins.str + @property + def lastTransitionTime(self) -> Global___Time: ... + def __init__( + self, + *, + type: builtins.str | None = ..., + status: builtins.str | None = ..., + observedGeneration: builtins.int | None = ..., + lastTransitionTime: Global___Time | None = ..., + reason: builtins.str | None = ..., + message: builtins.str | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_lastTransitionTime", b"_lastTransitionTime", "_message", b"_message", "_observedGeneration", b"_observedGeneration", "_reason", b"_reason", "_status", b"_status", "_type", b"_type", "lastTransitionTime", b"lastTransitionTime", "message", b"message", "observedGeneration", b"observedGeneration", "reason", b"reason", "status", b"status", "type", b"type"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_lastTransitionTime", b"_lastTransitionTime", "_message", b"_message", "_observedGeneration", b"_observedGeneration", "_reason", b"_reason", "_status", b"_status", "_type", b"_type", "lastTransitionTime", b"lastTransitionTime", "message", b"message", "observedGeneration", b"observedGeneration", "reason", b"reason", "status", b"status", "type", b"type"]) -> None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_lastTransitionTime", b"_lastTransitionTime"]) -> typing.Literal["lastTransitionTime"] | None: ... 
+ @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_message", b"_message"]) -> typing.Literal["message"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_observedGeneration", b"_observedGeneration"]) -> typing.Literal["observedGeneration"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_reason", b"_reason"]) -> typing.Literal["reason"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_status", b"_status"]) -> typing.Literal["status"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_type", b"_type"]) -> typing.Literal["type"] | None: ... + +Global___Condition: typing_extensions.TypeAlias = Condition diff --git a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/kubernetes_pb2_grpc.pyi b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/kubernetes_pb2_grpc.pyi new file mode 100644 index 00000000..6aac9706 --- /dev/null +++ b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/kubernetes_pb2_grpc.pyi @@ -0,0 +1,20 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2024 The Jumpstarter Authors""" + +import abc +import collections.abc +import grpc +import grpc.aio +import typing + +_T = typing.TypeVar("_T") + +class _MaybeAsyncIterator(collections.abc.AsyncIterator[_T], collections.abc.Iterator[_T], metaclass=abc.ABCMeta): ... + +class _ServicerContext(grpc.ServicerContext, grpc.aio.ServicerContext): # type: ignore[misc, type-arg] + ... 
+ +GRPC_GENERATED_VERSION: str +GRPC_VERSION: str diff --git a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/router_pb2.pyi b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/router_pb2.pyi new file mode 100644 index 00000000..905fc6b8 --- /dev/null +++ b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/router_pb2.pyi @@ -0,0 +1,73 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2024 The Jumpstarter Authors""" + +import builtins +import google.protobuf.descriptor +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class _FrameType: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _FrameTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_FrameType.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + FRAME_TYPE_DATA: _FrameType.ValueType # 0 + FRAME_TYPE_RST_STREAM: _FrameType.ValueType # 3 + FRAME_TYPE_PING: _FrameType.ValueType # 6 + FRAME_TYPE_GOAWAY: _FrameType.ValueType # 7 + +class FrameType(_FrameType, metaclass=_FrameTypeEnumTypeWrapper): ... 
+ +FRAME_TYPE_DATA: FrameType.ValueType # 0 +FRAME_TYPE_RST_STREAM: FrameType.ValueType # 3 +FRAME_TYPE_PING: FrameType.ValueType # 6 +FRAME_TYPE_GOAWAY: FrameType.ValueType # 7 +Global___FrameType: typing_extensions.TypeAlias = FrameType + +@typing.final +class StreamRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PAYLOAD_FIELD_NUMBER: builtins.int + FRAME_TYPE_FIELD_NUMBER: builtins.int + payload: builtins.bytes + frame_type: Global___FrameType.ValueType + def __init__( + self, + *, + payload: builtins.bytes = ..., + frame_type: Global___FrameType.ValueType = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["frame_type", b"frame_type", "payload", b"payload"]) -> None: ... + +Global___StreamRequest: typing_extensions.TypeAlias = StreamRequest + +@typing.final +class StreamResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PAYLOAD_FIELD_NUMBER: builtins.int + FRAME_TYPE_FIELD_NUMBER: builtins.int + payload: builtins.bytes + frame_type: Global___FrameType.ValueType + def __init__( + self, + *, + payload: builtins.bytes = ..., + frame_type: Global___FrameType.ValueType = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["frame_type", b"frame_type", "payload", b"payload"]) -> None: ... + +Global___StreamResponse: typing_extensions.TypeAlias = StreamResponse diff --git a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/router_pb2_grpc.pyi b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/router_pb2_grpc.pyi new file mode 100644 index 00000000..03277884 --- /dev/null +++ b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/router_pb2_grpc.pyi @@ -0,0 +1,96 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file +Copyright 2024 The Jumpstarter Authors""" + +import abc +import collections.abc +import grpc +import grpc.aio +import jumpstarter.v1.router_pb2 +import sys +import typing + +if sys.version_info >= (3, 13): + import typing as typing_extensions +else: + import typing_extensions + +_T = typing.TypeVar("_T") + +class _MaybeAsyncIterator(collections.abc.AsyncIterator[_T], collections.abc.Iterator[_T], metaclass=abc.ABCMeta): ... + +class _ServicerContext(grpc.ServicerContext, grpc.aio.ServicerContext): # type: ignore[misc, type-arg] + ... + +GRPC_GENERATED_VERSION: str +GRPC_VERSION: str +_RouterServiceStreamType = typing_extensions.TypeVar( + '_RouterServiceStreamType', + grpc.StreamStreamMultiCallable[ + jumpstarter.v1.router_pb2.StreamRequest, + jumpstarter.v1.router_pb2.StreamResponse, + ], + grpc.aio.StreamStreamMultiCallable[ + jumpstarter.v1.router_pb2.StreamRequest, + jumpstarter.v1.router_pb2.StreamResponse, + ], + default=grpc.StreamStreamMultiCallable[ + jumpstarter.v1.router_pb2.StreamRequest, + jumpstarter.v1.router_pb2.StreamResponse, + ], +) + +class RouterServiceStub(typing.Generic[_RouterServiceStreamType]): + """StreamService + Claims: + iss: jumpstarter controller + aud: jumpstarter router + sub: jumpstarter client/exporter + stream: stream id + """ + + @typing.overload + def __init__(self: RouterServiceStub[ + grpc.StreamStreamMultiCallable[ + jumpstarter.v1.router_pb2.StreamRequest, + jumpstarter.v1.router_pb2.StreamResponse, + ], + ], channel: grpc.Channel) -> None: ... + + @typing.overload + def __init__(self: RouterServiceStub[ + grpc.aio.StreamStreamMultiCallable[ + jumpstarter.v1.router_pb2.StreamRequest, + jumpstarter.v1.router_pb2.StreamResponse, + ], + ], channel: grpc.aio.Channel) -> None: ... 
+ + Stream: _RouterServiceStreamType + """Stream connects caller to another caller of the same stream""" + +RouterServiceAsyncStub: typing_extensions.TypeAlias = RouterServiceStub[ + grpc.aio.StreamStreamMultiCallable[ + jumpstarter.v1.router_pb2.StreamRequest, + jumpstarter.v1.router_pb2.StreamResponse, + ], +] + +class RouterServiceServicer(metaclass=abc.ABCMeta): + """StreamService + Claims: + iss: jumpstarter controller + aud: jumpstarter router + sub: jumpstarter client/exporter + stream: stream id + """ + + @abc.abstractmethod + def Stream( + self, + request_iterator: _MaybeAsyncIterator[jumpstarter.v1.router_pb2.StreamRequest], + context: _ServicerContext, + ) -> typing.Union[collections.abc.Iterator[jumpstarter.v1.router_pb2.StreamResponse], collections.abc.AsyncIterator[jumpstarter.v1.router_pb2.StreamResponse]]: + """Stream connects caller to another caller of the same stream""" + +def add_RouterServiceServicer_to_server(servicer: RouterServiceServicer, server: typing.Union[grpc.Server, grpc.aio.Server]) -> None: ... 
diff --git a/python/packages/jumpstarter/jumpstarter/config/exporter.py b/python/packages/jumpstarter/jumpstarter/config/exporter.py index 3e7b88b2..11f94f1d 100644 --- a/python/packages/jumpstarter/jumpstarter/config/exporter.py +++ b/python/packages/jumpstarter/jumpstarter/config/exporter.py @@ -25,13 +25,16 @@ class HookInstanceConfigV1Alpha1(BaseModel): script: str = Field(alias="script", description="The j script to execute for this hook") timeout: int = Field(default=120, description="The hook execution timeout in seconds (default: 120s)") - exit_code: int = Field(alias="exitCode", default=0, description="The expected exit code (default: 0)") - on_failure: Literal["pass", "block", "warn"] = Field( - default="pass", + on_failure: Literal[ + "warn", + "endLease", + "exit", + ] = Field( + default="warn", alias="onFailure", description=( - "Action to take when the expected exit code is not returned: 'pass' continues normally, " - "'block' takes the exporter offline and blocks leases, 'warn' continues and prints a warning" + "Action to take when the expected exit code is not returned: 'endLease' to end the lease, " + "'exit' takes the exporter offline and ends the lease, 'warn' continues and prints a warning" ), ) diff --git a/python/packages/jumpstarter/jumpstarter/driver/base.py b/python/packages/jumpstarter/jumpstarter/driver/base.py index ee3cdd18..fa798d2e 100644 --- a/python/packages/jumpstarter/jumpstarter/driver/base.py +++ b/python/packages/jumpstarter/jumpstarter/driver/base.py @@ -29,13 +29,13 @@ ) from jumpstarter.common import LogSource, Metadata from jumpstarter.common.resources import ClientStreamResource, PresignedRequestResource, Resource, ResourceMetadata -from jumpstarter.exporter.logging import get_logger from jumpstarter.common.serde import decode_value, encode_value from jumpstarter.common.streams import ( DriverStreamRequest, ResourceStreamRequest, ) from jumpstarter.config.env import JMP_DISABLE_COMPRESSION +from 
jumpstarter.exporter.logging import get_logger from jumpstarter.streams.aiohttp import AiohttpStreamReaderStream from jumpstarter.streams.common import create_memory_stream from jumpstarter.streams.encoding import Compression, compress_stream diff --git a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py index 301c57bb..8d9c8cf7 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py @@ -2,7 +2,7 @@ from collections.abc import AsyncGenerator, Awaitable, Callable from contextlib import asynccontextmanager from dataclasses import dataclass, field -from typing import Self +from typing import Any, Self import grpc from anyio import ( @@ -34,22 +34,136 @@ @dataclass(kw_only=True) class Exporter(AsyncContextManagerMixin, Metadata): + """Represents a Jumpstarter Exporter runtime instance. + + Inherits from Metadata, which provides: + uuid: Unique identifier for the exporter instance (UUID4) + labels: Key-value labels for exporter identification and selector matching + """ + + # Public Configuration Fields + channel_factory: Callable[[], Awaitable[grpc.aio.Channel]] + """Factory function for creating gRPC channels to communicate with the controller. + + Called multiple times throughout the exporter lifecycle to establish connections. + The factory should handle authentication, credentials, and channel configuration. + Used when creating controller stubs, unregistering, and establishing streams. + """ + device_factory: Callable[[], Driver] - lease_name: str = field(init=False, default="") + """Factory function for creating Driver instances representing the hardware/devices. + + Called when creating Sessions to provide access to the underlying device. + The Driver can contain child drivers in a composite pattern, representing + the full device tree being exported. Typically created from ExporterConfigV1Alpha1. 
+ """ + tls: TLSConfigV1Alpha1 = field(default_factory=TLSConfigV1Alpha1) + """TLS/SSL configuration for secure communication with router and controller. + + Contains certificate authority (ca) and insecure flag for certificate verification. + Passed to connect_router_stream() when handling client connections. + Default creates empty config with ca="" and insecure=False. + """ + grpc_options: dict[str, str] = field(default_factory=dict) + """Custom gRPC channel options that override or supplement default settings. + + Merged with defaults (round_robin load balancing, keepalive settings, etc.). + Configured via YAML as grpcOptions in exporter config. + Passed to connect_router_stream() for client connections. + """ + hook_executor: HookExecutor | None = field(default=None) - registered: bool = field(init=False, default=False) + """Optional executor for lifecycle hooks (before-lease and after-lease). + + When configured, runs custom scripts at key points in the lease lifecycle: + - before-lease: Runs when transitioning to leased state (setup, validation) + - after-lease: Runs when transitioning from leased state (cleanup, reset) + Created when hooks.before_lease or hooks.after_lease are defined in config. + """ + + # Internal State Fields + + _lease_name: str = field(init=False, default="") + """Current lease name assigned by the controller. + + Empty string indicates no active lease. Updated when controller assigns/reassigns + the exporter. Used to detect lease transitions and create hook contexts. + """ + + _registered: bool = field(init=False, default=False) + """Tracks whether exporter has successfully registered with the controller. + + Set to True after successful registration. Used to determine if unregistration + is needed during cleanup. + """ + _unregister: bool = field(init=False, default=False) + """Internal flag indicating whether to actively unregister during shutdown. + + Set when stop(should_unregister=True) is called. 
When False, relies on + heartbeat timeout for implicit unregistration. + """ + _stop_requested: bool = field(init=False, default=False) + """Internal flag indicating a graceful stop has been requested. + + Set to True when stop(wait_for_lease_exit=True) is called. The exporter + waits for the current lease to exit before stopping. + """ + _started: bool = field(init=False, default=False) + """Internal flag tracking whether the exporter has started serving. + + Set to True when the first lease is assigned. Used to determine immediate + vs graceful stop behavior. + """ + _tg: TaskGroup | None = field(init=False, default=None) + """Reference to the anyio TaskGroup managing concurrent tasks. + + Manages streams and connection handling tasks. Used to cancel all tasks + when stopping. Set during serve() and cleared when done. + """ + _current_client_name: str = field(init=False, default="") - _pre_lease_ready: Event | None = field(init=False, default=None) - _current_status: ExporterStatus = field(init=False, default=ExporterStatus.OFFLINE) + """Name of the client currently holding the lease. + + Used to create hook contexts with client information and determine if + after-lease hooks should run. Reset when lease is released. + """ + + _before_lease_hook: Event | None = field(init=False, default=None) + """Synchronization event that blocks connection handling until hook completes. + + Created when a new lease starts, waited on before accepting connections, + and set when hook completes or is not configured. + """ + + _exporter_status: ExporterStatus = field(init=False, default=ExporterStatus.OFFLINE) + """Current status of the exporter. + + Updated via _update_status() and reported to controller and session. + Possible values: OFFLINE, AVAILABLE, BEFORE_LEASE_HOOK, LEASE_READY, + AFTER_LEASE_HOOK, BEFORE_LEASE_HOOK_FAILED, AFTER_LEASE_HOOK_FAILED. 
+ """ + _current_session: Session | None = field(init=False, default=None) + """Reference to the currently active Session object. + + A Session wraps the root device and provides gRPC service endpoints. + Used to update session status and pass to HookExecutor for logging. + Set in session() context manager and cleared when context exits. + """ + _session_socket_path: str | None = field(init=False, default=None) + """Unix socket path where the current session is serving. + + Passed to hooks so they can communicate with the device via the CLI. + Enables session reuse instead of creating new ones for hooks. + """ def stop(self, wait_for_lease_exit=False, should_unregister=False): """Signal the exporter to stop. @@ -68,57 +182,189 @@ def stop(self, wait_for_lease_exit=False, should_unregister=False): self._stop_requested = True logger.info("Exporter marked for stop upon lease exit") - async def _update_status(self, status: ExporterStatus, message: str = ""): - """Update exporter status with the controller and session.""" - self._current_status = status + async def _get_controller_stub(self) -> jumpstarter_pb2_grpc.ControllerServiceStub: + """Create and return a controller service stub.""" + return jumpstarter_pb2_grpc.ControllerServiceStub(await self.channel_factory()) + + async def _retry_stream( + self, + stream_name: str, + stream_factory: Callable[[jumpstarter_pb2_grpc.ControllerServiceStub], AsyncGenerator], + send_tx, + retries: int = 5, + backoff: float = 3.0, + ): + """Generic retry wrapper for gRPC streaming calls. 
+ + Args: + stream_name: Name of the stream for logging purposes + stream_factory: Function that takes a controller stub and returns an async generator + send_tx: Transmission channel to send stream items to + retries: Maximum number of retry attempts + backoff: Seconds to wait between retries + """ + retries_left = retries + while True: + try: + controller = await self._get_controller_stub() + async for item in stream_factory(controller): + await send_tx.send(item) + except Exception as e: + if retries_left > 0: + retries_left -= 1 + logger.info( + "%s stream interrupted, restarting in %ss, %s retries left: %s", + stream_name, + backoff, + retries_left, + e, + ) + await sleep(backoff) + else: + raise + else: + retries_left = retries + + def _listen_stream_factory( + self, lease_name: str + ) -> Callable[[jumpstarter_pb2_grpc.ControllerServiceStub], AsyncGenerator[jumpstarter_pb2.ListenResponse, None]]: + """Create a stream factory for listening to connection requests. + + Args: + lease_name: The lease name to listen for + + Returns: + A factory function that creates a Listen stream when given a ControllerServiceStub + """ + + def factory( + ctrl: jumpstarter_pb2_grpc.ControllerServiceStub, + ) -> AsyncGenerator[jumpstarter_pb2.ListenResponse, None]: + return ctrl.Listen(jumpstarter_pb2.ListenRequest(lease_name=lease_name)) + + return factory + + def _status_stream_factory( + self, + ) -> Callable[[jumpstarter_pb2_grpc.ControllerServiceStub], AsyncGenerator[jumpstarter_pb2.StatusResponse, None]]: + """Create a stream factory for status updates. 
+ + Returns: + A factory function that creates a Status stream when given a ControllerServiceStub + """ + + def factory( + ctrl: jumpstarter_pb2_grpc.ControllerServiceStub, + ) -> AsyncGenerator[jumpstarter_pb2.StatusResponse, None]: + return ctrl.Status(jumpstarter_pb2.StatusRequest()) + + return factory + + def _create_hook_context(self, lease_name: str, client_name: str) -> HookContext: + """Create a standardized hook context. + + Args: + lease_name: Name of the lease + client_name: Name of the client + + Returns: + HookContext object with consistent fields + """ + return HookContext( + lease_name=lease_name, + client_name=client_name, + ) + + async def _register_with_controller(self, channel: grpc.aio.Channel): + """Register the exporter with the controller.""" + response = await jumpstarter_pb2_grpc.ExporterServiceStub(channel).GetReport(empty_pb2.Empty()) + logger.info("Registering exporter with controller") + controller = jumpstarter_pb2_grpc.ControllerServiceStub(channel) + await controller.Register( + jumpstarter_pb2.RegisterRequest( + labels=self.labels, + reports=response.reports, + ) + ) + self._registered = True + await self._report_status(ExporterStatus.AVAILABLE, "Exporter registered and available") + + async def _report_status(self, status: ExporterStatus, message: str = ""): + """Report the exporter status with the controller and session.""" + self._exporter_status = status # Update session status if available if self._current_session: self._current_session.update_status(status, message) try: - controller = jumpstarter_pb2_grpc.ControllerServiceStub(await self.channel_factory()) - await controller.UpdateStatus( - jumpstarter_pb2.UpdateStatusRequest( + controller = await self._get_controller_stub() + await controller.ReportStatus( + jumpstarter_pb2.ReportStatusRequest( status=status.to_proto(), - status_message=message, + message=message, ) ) logger.info(f"Updated status to {status}: {message}") except Exception as e: logger.error(f"Failed to update 
status: {e}") + async def _unregister_with_controller(self): + """Safely unregister from controller with timeout and error handling.""" + if not (self._registered and self._unregister): + return + + logger.info("Unregistering exporter with controller") + try: + with move_on_after(10): # 10 second timeout + channel = await self.channel_factory() + try: + controller = jumpstarter_pb2_grpc.ControllerServiceStub(channel) + await self._report_status(ExporterStatus.OFFLINE, "Exporter shutting down") + await controller.Unregister( + jumpstarter_pb2.UnregisterRequest( + reason="Exporter shutdown", + ) + ) + logger.info("Controller unregistration completed successfully") + finally: + with CancelScope(shield=True): + await channel.close() + except Exception as e: + logger.error("Error during controller unregistration: %s", e, exc_info=True) + @asynccontextmanager async def __asynccontextmanager__(self) -> AsyncGenerator[Self]: try: yield self finally: try: - if self.registered and self._unregister: - logger.info("Unregistering exporter with controller") - try: - with move_on_after(10): # 10 second timeout - channel = await self.channel_factory() - try: - controller = jumpstarter_pb2_grpc.ControllerServiceStub(channel) - await self._update_status(ExporterStatus.OFFLINE, "Exporter shutting down") - await controller.Unregister( - jumpstarter_pb2.UnregisterRequest( - reason="Exporter shutdown", - ) - ) - logger.info("Controller unregistration completed successfully") - finally: - with CancelScope(shield=True): - await channel.close() - except Exception as e: - logger.error("Error during controller unregistration: %s", e, exc_info=True) - + await self._unregister_with_controller() except Exception as e: logger.error("Error during exporter cleanup: %s", e, exc_info=True) # Don't re-raise to avoid masking the original exception - async def __handle(self, path, endpoint, token, tls_config, grpc_options): + async def _handle_client_conn( + self, path: str, endpoint: str, token: str, 
tls_config: TLSConfigV1Alpha1, grpc_options: dict[str, Any] | None + ) -> None: + """Handle a single client connection by proxying between session and router. + + This method establishes a connection from the local session Unix socket to the + router endpoint, creating a bidirectional proxy that allows the client to + communicate with the device through the router infrastructure. + + Args: + path: Unix socket path where the session is serving + endpoint: Router endpoint URL to connect to + token: Authentication token for the router connection + tls_config: TLS configuration for secure router communication + grpc_options: Optional gRPC channel options for the router connection + + Note: + This is a private method spawned as a concurrent task by handle_lease_conn() + for each incoming connection request. It runs until the client disconnects + or an error occurs. + """ try: async with await connect_unix(path) as stream: async with connect_router_stream(endpoint, token, stream, tls_config, grpc_options): @@ -128,78 +374,84 @@ async def __handle(self, path, endpoint, token, tls_config, grpc_options): @asynccontextmanager async def session(self): - controller = jumpstarter_pb2_grpc.ControllerServiceStub(await self.channel_factory()) + """Create and manage an exporter Session context.""" with Session( uuid=self.uuid, labels=self.labels, root_device=self.device_factory(), ) as session: - # Store session reference for status updates + # Store session reference outside context for status updates self._current_session = session try: + # Create a Unix socket async with session.serve_unix_async() as path: + # Create a gRPC channel to the controller via the socket async with grpc.aio.secure_channel( f"unix://{path}", grpc.local_channel_credentials(grpc.LocalConnectionType.UDS) ) as channel: - response = await jumpstarter_pb2_grpc.ExporterServiceStub(channel).GetReport(empty_pb2.Empty()) - logger.info("Registering exporter with controller") - await controller.Register( - 
jumpstarter_pb2.RegisterRequest( - labels=self.labels, - reports=response.reports, - ) - ) - self.registered = True - await self._update_status(ExporterStatus.AVAILABLE, "Exporter registered and available") + # Register the exporter with the controller + await self._register_with_controller(channel) yield path finally: - # Clear session reference + # Clear the session reference self._current_session = None - async def handle(self, lease_name, tg): - logger.info("Listening for incoming connection requests on lease %s", lease_name) + async def handle_lease(self, lease_name: str, tg: TaskGroup) -> None: + """Handle all incoming client connections for a lease. - listen_tx, listen_rx = create_memory_object_stream() + This method orchestrates the complete lifecycle of managing connections during + a lease period. It listens for connection requests and spawns individual + tasks to handle each client connection. - async def listen(retries=5, backoff=3): - retries_left = retries - while True: - try: - controller = jumpstarter_pb2_grpc.ControllerServiceStub(await self.channel_factory()) - async for request in controller.Listen(jumpstarter_pb2.ListenRequest(lease_name=lease_name)): - await listen_tx.send(request) - except Exception as e: - if retries_left > 0: - retries_left -= 1 - logger.info( - "Listen stream interrupted, restarting in {}s, {} retries left: {}".format( - backoff, retries_left, e - ) - ) - await sleep(backoff) - else: - raise - else: - retries_left = retries + The method performs the following steps: + 1. Sets up a stream to listen for incoming connection requests + 2. Creates a session with a Unix socket for device access + 3. Waits for the before-lease hook to complete (if configured) + 4. 
Spawns a new task for each incoming connection request - tg.start_soon(listen) + Args: + lease_name: Name of the lease to handle connections for + tg: TaskGroup for spawning concurrent connection handler tasks - # Create session before hooks run + Note: + This method runs for the entire duration of the lease and is spawned by + the serve() method when a lease is assigned. It terminates when the lease + ends or the exporter stops. + """ + logger.info("Listening for incoming connection requests on lease %s", lease_name) + + listen_tx, listen_rx = create_memory_object_stream[jumpstarter_pb2.ListenResponse]() + + # Start listening for connection requests with retry logic + tg.start_soon( + self._retry_stream, + "Listen", + self._listen_stream_factory(lease_name), + listen_tx, + ) + + # Create a lease session to execute hooks and handle connections async with self.session() as path: # Store socket path for hook execution self._session_socket_path = path - # Wait for before-lease hook to complete before processing connections - if self._pre_lease_ready is not None: + # Wait for before-lease hook to complete before processing client connections + if self._before_lease_hook is not None: logger.info("Waiting for before-lease hook to complete before accepting connections") - await self._pre_lease_ready.wait() - logger.info("before-lease hook completed, now accepting connections") + await self._before_lease_hook.wait() + logger.info("Before-lease hook completed, now accepting connections") # Process client connections + # Type: request is jumpstarter_pb2.ListenResponse with router_endpoint and router_token fields async for request in listen_rx: logger.info("Handling new connection request on lease %s", lease_name) tg.start_soon( - self.__handle, path, request.router_endpoint, request.router_token, self.tls, self.grpc_options + self._handle_client_conn, + path, + request.router_endpoint, + request.router_token, + self.tls, + self.grpc_options, ) async def serve(self): # noqa: 
C901 @@ -209,48 +461,51 @@ async def serve(self): # noqa: C901 # initial registration async with self.session(): pass - status_tx, status_rx = create_memory_object_stream() - - async def status(retries=5, backoff=3): - retries_left = retries - while True: - try: - controller = jumpstarter_pb2_grpc.ControllerServiceStub(await self.channel_factory()) - async for status in controller.Status(jumpstarter_pb2.StatusRequest()): - await status_tx.send(status) - except Exception as e: - if retries_left > 0: - retries_left -= 1 - logger.info( - "Status stream interrupted, restarting in {}s, {} retries left: {}".format( - backoff, retries_left, e - ) - ) - await sleep(backoff) - else: - raise - else: - retries_left = retries + status_tx, status_rx = create_memory_object_stream[jumpstarter_pb2.StatusResponse]() async with create_task_group() as tg: self._tg = tg - tg.start_soon(status) + # Start status stream with retry logic + tg.start_soon( + self._retry_stream, + "Status", + self._status_stream_factory(), + status_tx, + ) + # Type: status is jumpstarter_pb2.StatusResponse with lease_name and other status fields async for status in status_rx: - if self.lease_name != "" and self.lease_name != status.lease_name: - # After-lease hook for the previous lease + if self._lease_name != "" and self._lease_name != status.lease_name: + # After-lease hook for the previous lease (lease name changed) if self.hook_executor and self._current_client_name: - hook_context = HookContext( - lease_name=self.lease_name, - client_name=self._current_client_name, - ) + hook_context = self._create_hook_context(self._lease_name, self._current_client_name) # Shield the after-lease hook from cancellation and await it with CancelScope(shield=True): - await self.run_after_lease_hook(hook_context) + await self._report_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") + self.hook_executor.main_session = self._current_session + try: + await self.hook_executor.execute_after_lease_hook( + 
hook_context, socket_path=self._session_socket_path + ) + await self._report_status(ExporterStatus.AVAILABLE, "Available for new lease") + except HookExecutionError as e: + logger.error("afterLease hook failed (on_failure=endLease/exit): %s", e) + await self._report_status( + ExporterStatus.AFTER_LEASE_HOOK_FAILED, + f"afterLease hook failed: {e}", + ) + logger.error("Shutting down exporter due to afterLease hook failure") + self.stop() + except Exception as e: + logger.error("afterLease hook failed with unexpected error: %s", e, exc_info=True) + await self._report_status( + ExporterStatus.AFTER_LEASE_HOOK_FAILED, + f"afterLease hook failed: {e}", + ) - self.lease_name = status.lease_name + self._lease_name = status.lease_name logger.info("Lease status changed, killing existing connections") # Reset event for next lease - self._pre_lease_ready = None + self._before_lease_hook = None self.stop() break @@ -258,12 +513,12 @@ async def status(retries=5, backoff=3): previous_leased = hasattr(self, "_previous_leased") and self._previous_leased current_leased = status.leased - self.lease_name = status.lease_name - if not self._started and self.lease_name != "": + self._lease_name = status.lease_name + if not self._started and self._lease_name != "": self._started = True # Create event for pre-lease synchronization - self._pre_lease_ready = Event() - tg.start_soon(self.handle, self.lease_name, tg) + self._before_lease_hook = Event() + tg.start_soon(self.handle_lease, self._lease_name, tg) if current_leased: logger.info("Currently leased by %s under %s", status.client_name, status.lease_name) @@ -272,39 +527,33 @@ async def status(retries=5, backoff=3): # Before-lease hook when transitioning from unleased to leased if not previous_leased: if self.hook_executor: - hook_context = HookContext( - lease_name=status.lease_name, - client_name=status.client_name, - ) - tg.start_soon(self.run_before_lease_hook, self, hook_context) + hook_context = 
self._create_hook_context(status.lease_name, status.client_name) + tg.start_soon(self.run_before_lease_hook, hook_context) else: # No hook configured, set event immediately - await self._update_status(ExporterStatus.LEASE_READY, "Ready for commands") - if self._pre_lease_ready: - self._pre_lease_ready.set() + await self._report_status(ExporterStatus.LEASE_READY, "Ready for commands") + if self._before_lease_hook: + self._before_lease_hook.set() else: logger.info("Currently not leased") # After-lease hook when transitioning from leased to unleased if previous_leased and self.hook_executor and self._current_client_name: - hook_context = HookContext( - lease_name=self.lease_name, - client_name=self._current_client_name, - ) + hook_context = self._create_hook_context(self._lease_name, self._current_client_name) # Shield the after-lease hook from cancellation and await it with CancelScope(shield=True): - await self._update_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") + await self._report_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") # Pass the current session to hook executor for logging self.hook_executor.main_session = self._current_session # Use session socket if available, otherwise create new session await self.hook_executor.execute_after_lease_hook( hook_context, socket_path=self._session_socket_path ) - await self._update_status(ExporterStatus.AVAILABLE, "Available for new lease") + await self._report_status(ExporterStatus.AVAILABLE, "Available for new lease") self._current_client_name = "" # Reset event for next lease - self._pre_lease_ready = None + self._before_lease_hook = None if self._stop_requested: self.stop(should_unregister=True) @@ -321,7 +570,7 @@ async def run_before_lease_hook(self, hook_ctx: HookContext): hook_ctx (HookContext): The current hook execution context """ try: - await self._update_status(ExporterStatus.BEFORE_LEASE_HOOK, "Running beforeLease hooks") + await 
self._report_status(ExporterStatus.BEFORE_LEASE_HOOK, "Running beforeLease hooks") # Pass the current session to hook executor for logging self.hook_executor.main_session = self._current_session @@ -331,12 +580,12 @@ async def run_before_lease_hook(self, hook_ctx: HookContext): # Execute hook with main session socket await self.hook_executor.execute_before_lease_hook(hook_ctx, socket_path=self._session_socket_path) - await self._update_status(ExporterStatus.LEASE_READY, "Ready for commands") + await self._report_status(ExporterStatus.LEASE_READY, "Ready for commands") logger.info("beforeLease hook completed successfully") except HookExecutionError as e: # Hook failed with on_failure='block' - end lease and set failed status logger.error("beforeLease hook failed (on_failure=block): %s", e) - await self._update_status( + await self._report_status( ExporterStatus.BEFORE_LEASE_HOOK_FAILED, f"beforeLease hook failed (on_failure=block): {e}" ) # Note: We don't take the exporter offline for before_lease hook failures @@ -344,11 +593,11 @@ async def run_before_lease_hook(self, hook_ctx: HookContext): except Exception as e: # Unexpected error during hook execution logger.error("beforeLease hook failed with unexpected error: %s", e, exc_info=True) - await self._update_status(ExporterStatus.BEFORE_LEASE_HOOK_FAILED, f"beforeLease hook failed: {e}") + await self._report_status(ExporterStatus.BEFORE_LEASE_HOOK_FAILED, f"beforeLease hook failed: {e}") finally: # Always set the event to unblock connections - if self._pre_lease_ready: - self._pre_lease_ready.set() + if self._before_lease_hook: + self._before_lease_hook.set() async def run_after_lease_hook(self, hook_ctx: HookContext): """ @@ -358,17 +607,17 @@ async def run_after_lease_hook(self, hook_ctx: HookContext): hook_ctx (HookContext): The current hook execution context """ try: - await self._update_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") + await 
self._report_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") # Pass the current session to hook executor for logging self.hook_executor.main_session = self._current_session # Use session socket if available, otherwise create new session await self.hook_executor.execute_after_lease_hook(hook_ctx, socket_path=self._session_socket_path) - await self._update_status(ExporterStatus.AVAILABLE, "Available for new lease") + await self._report_status(ExporterStatus.AVAILABLE, "Available for new lease") logger.info("afterLease hook completed successfully") except HookExecutionError as e: # Hook failed with on_failure='block' - set failed status and shut down exporter logger.error("afterLease hook failed (on_failure=block): %s", e) - await self._update_status( + await self._report_status( ExporterStatus.AFTER_LEASE_HOOK_FAILED, f"afterLease hook failed (on_failure=block): {e}" ) # Shut down the exporter after after_lease hook failure with on_failure='block' @@ -377,4 +626,4 @@ async def run_after_lease_hook(self, hook_ctx: HookContext): except Exception as e: # Unexpected error during hook execution logger.error("afterLease hook failed with unexpected error: %s", e, exc_info=True) - await self._update_status(ExporterStatus.AFTER_LEASE_HOOK_FAILED, f"afterLease hook failed: {e}") + await self._report_status(ExporterStatus.AFTER_LEASE_HOOK_FAILED, f"afterLease hook failed: {e}") diff --git a/python/packages/jumpstarter/jumpstarter/exporter/hooks.py b/python/packages/jumpstarter/jumpstarter/exporter/hooks.py index 77803b28..a3827560 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/hooks.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/hooks.py @@ -5,7 +5,7 @@ import os from contextlib import asynccontextmanager from dataclasses import dataclass, field -from typing import Callable +from typing import AsyncGenerator, Callable from jumpstarter.common import LogSource from jumpstarter.config.env import JMP_DRIVERS_ALLOW, JUMPSTARTER_HOST @@ 
-18,7 +18,7 @@ class HookExecutionError(Exception): - """Raised when a hook fails and on_failure is set to 'block'.""" + """Raised when a hook fails and on_failure is set to 'endLease' or 'exit'.""" pass @@ -42,8 +42,34 @@ class HookExecutor: device_factory: Callable[[], Driver] main_session: Session | None = field(default=None) + def _create_hook_env(self, context: HookContext, socket_path: str) -> dict[str, str]: + """Create standardized hook environment variables. + + Args: + context: Hook context information + socket_path: Path to the Unix socket for JUMPSTARTER_HOST + + Returns: + Dictionary of environment variables for hook execution + """ + hook_env = os.environ.copy() + hook_env.update( + { + JUMPSTARTER_HOST: str(socket_path), + JMP_DRIVERS_ALLOW: "UNSAFE", # Allow all drivers for local access + "LEASE_NAME": context.lease_name, + "CLIENT_NAME": context.client_name, + "LEASE_DURATION": context.lease_duration, + "EXPORTER_NAME": context.exporter_name, + "EXPORTER_NAMESPACE": context.exporter_namespace, + } + ) + return hook_env + @asynccontextmanager - async def _create_hook_environment(self, context: HookContext): + async def _create_hook_environment( + self, context: HookContext + ) -> AsyncGenerator[tuple[Session, dict[str, str]], None]: """Create a local session and Unix socket for j CLI access.""" with Session( root_device=self.device_factory(), @@ -55,18 +81,7 @@ async def _create_hook_environment(self, context: HookContext): ) as session: async with session.serve_unix_async() as unix_path: # Create environment variables for the hook - hook_env = os.environ.copy() - hook_env.update( - { - JUMPSTARTER_HOST: str(unix_path), - JMP_DRIVERS_ALLOW: "UNSAFE", # Allow all drivers for local access - "LEASE_NAME": context.lease_name, - "CLIENT_NAME": context.client_name, - "LEASE_DURATION": context.lease_duration, - "EXPORTER_NAME": context.exporter_name, - "EXPORTER_NAMESPACE": context.exporter_namespace, - } - ) + hook_env = self._create_hook_env(context, 
unix_path) yield session, hook_env @@ -76,11 +91,11 @@ async def _execute_hook( context: HookContext, log_source: LogSource, socket_path: str | None = None, - ): + ) -> None: """Execute a single hook command. Args: - hook_config: Hook configuration including script, timeout, exit_code, and on_failure + hook_config: Hook configuration including script, timeout, and on_failure context: Hook context information log_source: Log source for hook output socket_path: Optional Unix socket path to reuse existing session. @@ -96,18 +111,7 @@ async def _execute_hook( # If socket_path provided, use existing session; otherwise create new one if socket_path is not None: # Reuse existing session - create environment without session creation - hook_env = os.environ.copy() - hook_env.update( - { - JUMPSTARTER_HOST: str(socket_path), - JMP_DRIVERS_ALLOW: "UNSAFE", - "LEASE_NAME": context.lease_name, - "CLIENT_NAME": context.client_name, - "LEASE_DURATION": context.lease_duration, - "EXPORTER_NAME": context.exporter_name, - "EXPORTER_NAMESPACE": context.exporter_namespace, - } - ) + hook_env = self._create_hook_env(context, socket_path) # Use main session for logging (must be available when socket_path is provided) logging_session = self.main_session @@ -127,13 +131,12 @@ async def _execute_hook_process( hook_config: HookInstanceConfigV1Alpha1, context: HookContext, log_source: LogSource, - hook_env: dict, + hook_env: dict[str, str], logging_session: Session, - ): + ) -> None: """Execute the hook process with the given environment and logging session.""" command = hook_config.script timeout = hook_config.timeout - expected_exit_code = hook_config.exit_code on_failure = hook_config.on_failure try: @@ -165,22 +168,22 @@ async def read_output(): # Run output reading and process waiting concurrently with timeout await asyncio.wait_for(asyncio.gather(read_output(), process.wait()), timeout=timeout) - # Check if exit code matches expected - if process.returncode == expected_exit_code: - 
logger.info("Hook executed successfully with exit code %d", process.returncode) + # Check if hook succeeded (exit code 0) + if process.returncode == 0: + logger.info("Hook executed successfully") return else: - # Exit code mismatch - handle according to on_failure setting - error_msg = f"Hook failed: expected exit code {expected_exit_code}, got {process.returncode}" + # Non-zero exit code is a failure - handle according to on_failure setting + error_msg = f"Hook failed with exit code {process.returncode}" - if on_failure == "pass": - logger.info("%s (on_failure=pass, continuing)", error_msg) - return - elif on_failure == "warn": + if on_failure == "warn": logger.warning("%s (on_failure=warn, continuing)", error_msg) return - else: # on_failure == "block" - logger.error("%s (on_failure=block, raising exception)", error_msg) + elif on_failure == "endLease": + logger.error("%s (on_failure=endLease, raising exception)", error_msg) + raise HookExecutionError(error_msg) + else: # on_failure == "exit" + logger.error("%s (on_failure=exit, raising exception)", error_msg) raise HookExecutionError(error_msg) except asyncio.TimeoutError as e: @@ -194,13 +197,12 @@ async def read_output(): await process.wait() # Handle timeout according to on_failure setting - if on_failure == "pass": - logger.info("%s (on_failure=pass, continuing)", error_msg) - return - elif on_failure == "warn": + if on_failure == "warn": logger.warning("%s (on_failure=warn, continuing)", error_msg) return - else: # on_failure == "block" + elif on_failure == "endLease": + raise HookExecutionError(error_msg) from e + else: # on_failure == "exit" raise HookExecutionError(error_msg) from e except HookExecutionError: @@ -211,16 +213,15 @@ async def read_output(): logger.error(error_msg, exc_info=True) # Handle exception according to on_failure setting - if on_failure == "pass": - logger.info("%s (on_failure=pass, continuing)", error_msg) - return - elif on_failure == "warn": + if on_failure == "warn": 
logger.warning("%s (on_failure=warn, continuing)", error_msg) return - else: # on_failure == "block" + elif on_failure == "endLease": + raise HookExecutionError(error_msg) from e + else: # on_failure == "exit" raise HookExecutionError(error_msg) from e - async def execute_before_lease_hook(self, context: HookContext, socket_path: str | None = None): + async def execute_before_lease_hook(self, context: HookContext, socket_path: str | None = None) -> None: """Execute the before-lease hook. Args: @@ -228,7 +229,7 @@ async def execute_before_lease_hook(self, context: HookContext, socket_path: str socket_path: Optional Unix socket path to reuse existing session Raises: - HookExecutionError: If hook fails and on_failure is set to 'block' + HookExecutionError: If hook fails and on_failure is set to 'endLease' or 'exit' """ if not self.config.before_lease: logger.debug("No before-lease hook configured") @@ -242,7 +243,7 @@ async def execute_before_lease_hook(self, context: HookContext, socket_path: str socket_path, ) - async def execute_after_lease_hook(self, context: HookContext, socket_path: str | None = None): + async def execute_after_lease_hook(self, context: HookContext, socket_path: str | None = None) -> None: """Execute the after-lease hook. 
Args: @@ -250,7 +251,7 @@ async def execute_after_lease_hook(self, context: HookContext, socket_path: str socket_path: Optional Unix socket path to reuse existing session Raises: - HookExecutionError: If hook fails and on_failure is set to 'block' + HookExecutionError: If hook fails and on_failure is set to 'endLease' or 'exit' """ if not self.config.after_lease: logger.debug("No after-lease hook configured") diff --git a/python/packages/jumpstarter/jumpstarter/exporter/hooks_test.py b/python/packages/jumpstarter/jumpstarter/exporter/hooks_test.py index 0e18d332..f31257df 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/hooks_test.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/hooks_test.py @@ -1,4 +1,5 @@ import asyncio +from typing import Callable from unittest.mock import AsyncMock, Mock, call, patch import pytest @@ -16,23 +17,23 @@ class MockDriver(Driver): def client(cls) -> str: return "test.MockClient" - def close(self): + def close(self) -> None: pass - def reset(self): + def reset(self) -> None: pass @pytest.fixture -def mock_device_factory(): - def factory(): +def mock_device_factory() -> Callable[[], MockDriver]: + def factory() -> MockDriver: return MockDriver() return factory @pytest.fixture -def hook_config(): +def hook_config() -> HookConfigV1Alpha1: return HookConfigV1Alpha1( before_lease=HookInstanceConfigV1Alpha1(script="echo 'Pre-lease hook executed'", timeout=10), after_lease=HookInstanceConfigV1Alpha1(script="echo 'Post-lease hook executed'", timeout=10), @@ -40,7 +41,7 @@ def hook_config(): @pytest.fixture -def hook_context(): +def hook_context() -> HookContext: return HookContext( lease_name="test-lease-123", client_name="test-client", @@ -51,7 +52,7 @@ def hook_context(): class TestHookExecutor: - async def test_hook_executor_creation(self, hook_config, mock_device_factory): + async def test_hook_executor_creation(self, hook_config, mock_device_factory) -> None: executor = HookExecutor( config=hook_config, 
device_factory=mock_device_factory, @@ -60,18 +61,18 @@ async def test_hook_executor_creation(self, hook_config, mock_device_factory): assert executor.config == hook_config assert executor.device_factory == mock_device_factory - async def test_empty_hook_execution(self, mock_device_factory, hook_context): + async def test_empty_hook_execution(self, mock_device_factory, hook_context) -> None: empty_config = HookConfigV1Alpha1() executor = HookExecutor( config=empty_config, device_factory=mock_device_factory, ) - # Both hooks should return True for empty/None commands - assert await executor.execute_before_lease_hook(hook_context) is True - assert await executor.execute_after_lease_hook(hook_context) is True + # Both hooks should return None for empty/None commands + assert await executor.execute_before_lease_hook(hook_context) is None + assert await executor.execute_after_lease_hook(hook_context) is None - async def test_successful_hook_execution(self, mock_device_factory, hook_context): + async def test_successful_hook_execution(self, mock_device_factory, hook_context) -> None: hook_config = HookConfigV1Alpha1( before_lease=HookInstanceConfigV1Alpha1(script="echo 'Pre-lease hook executed'", timeout=10), ) @@ -102,7 +103,7 @@ async def test_successful_hook_execution(self, mock_device_factory, hook_context result = await executor.execute_before_lease_hook(hook_context) - assert result is True + assert result is None # Verify subprocess was called with correct environment mock_subprocess.assert_called_once() @@ -117,11 +118,11 @@ async def test_successful_hook_execution(self, mock_device_factory, hook_context assert env["LEASE_NAME"] == "test-lease-123" assert env["CLIENT_NAME"] == "test-client" - async def test_failed_hook_execution(self, mock_device_factory, hook_context): + async def test_failed_hook_execution(self, mock_device_factory, hook_context) -> None: failed_config = HookConfigV1Alpha1( before_lease=HookInstanceConfigV1Alpha1( - script="exit 1", timeout=10, 
on_failure="block" - ), # Command that will fail with on_failure="block" + script="exit 1", timeout=10, on_failure="endLease" + ), # Command that will fail with on_failure="endLease" ) with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: @@ -147,15 +148,15 @@ async def test_failed_hook_execution(self, mock_device_factory, hook_context): device_factory=mock_device_factory, ) - # Should raise HookExecutionError since on_failure="block" - with pytest.raises(HookExecutionError, match="expected exit code 0, got 1"): + # Should raise HookExecutionError since on_failure="endLease" + with pytest.raises(HookExecutionError, match="Hook failed with exit code 1"): await executor.execute_before_lease_hook(hook_context) - async def test_hook_timeout(self, mock_device_factory, hook_context): + async def test_hook_timeout(self, mock_device_factory, hook_context) -> None: timeout_config = HookConfigV1Alpha1( before_lease=HookInstanceConfigV1Alpha1( - script="sleep 60", timeout=1, on_failure="block" - ), # Command that will timeout with on_failure="block" + script="sleep 60", timeout=1, on_failure="exit" + ), # Command that will timeout with on_failure="exit" ) with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: @@ -179,13 +180,13 @@ async def test_hook_timeout(self, mock_device_factory, hook_context): device_factory=mock_device_factory, ) - # Should raise HookExecutionError since on_failure="block" + # Should raise HookExecutionError since on_failure="exit" with pytest.raises(HookExecutionError, match="timed out after 1 seconds"): await executor.execute_before_lease_hook(hook_context) mock_process.terminate.assert_called_once() - async def test_hook_environment_variables(self, mock_device_factory, hook_context): + async def test_hook_environment_variables(self, mock_device_factory, hook_context) -> None: hook_config = HookConfigV1Alpha1( before_lease=HookInstanceConfigV1Alpha1(script="echo 'Pre-lease hook executed'", timeout=10), ) @@ -224,7 
+225,7 @@ async def test_hook_environment_variables(self, mock_device_factory, hook_contex assert env[JUMPSTARTER_HOST] == "/tmp/test_socket" assert env[JMP_DRIVERS_ALLOW] == "UNSAFE" - async def test_real_time_output_logging(self, mock_device_factory, hook_context): + async def test_real_time_output_logging(self, mock_device_factory, hook_context) -> None: """Test that hook output is logged in real-time at INFO level.""" hook_config = HookConfigV1Alpha1( before_lease=HookInstanceConfigV1Alpha1( @@ -262,17 +263,17 @@ async def test_real_time_output_logging(self, mock_device_factory, hook_context) result = await executor.execute_before_lease_hook(hook_context) - assert result is True + assert result is None # Verify that output lines were logged in real-time at INFO level expected_calls = [ call("Executing before-lease hook for lease %s", "test-lease-123"), call("Executing hook: %s", "echo 'Line 1'; echo 'Line 2'; echo 'Line 3'"), - call("Hook executed successfully with exit code %d", 0), + call("Hook executed successfully"), ] mock_logger.info.assert_has_calls(expected_calls, any_order=False) - async def test_post_lease_hook_execution_on_completion(self, mock_device_factory, hook_context): + async def test_post_lease_hook_execution_on_completion(self, mock_device_factory, hook_context) -> None: """Test that post-lease hook executes when called directly.""" hook_config = HookConfigV1Alpha1( after_lease=HookInstanceConfigV1Alpha1(script="echo 'Post-lease cleanup completed'", timeout=10), @@ -306,168 +307,17 @@ async def test_post_lease_hook_execution_on_completion(self, mock_device_factory result = await executor.execute_after_lease_hook(hook_context) - assert result is True + assert result is None # Verify that post-lease hook output was logged expected_calls = [ call("Executing after-lease hook for lease %s", "test-lease-123"), call("Executing hook: %s", "echo 'Post-lease cleanup completed'"), - call("Hook executed successfully with exit code %d", 0), + call("Hook 
executed successfully"), ] mock_logger.info.assert_has_calls(expected_calls, any_order=False) - async def test_hook_exit_code_matching_success(self, mock_device_factory, hook_context): - """Test that hook succeeds when exit code matches expected value.""" - hook_config = HookConfigV1Alpha1( - before_lease=HookInstanceConfigV1Alpha1(script="exit 0", timeout=10, exit_code=0), - ) - - with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: - mock_session = Mock() - mock_session_class.return_value.__enter__.return_value = mock_session - mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") - mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) - - mock_process = AsyncMock() - mock_process.returncode = 0 - mock_process.stdout.readline.side_effect = [b""] - mock_process.wait = AsyncMock(return_value=None) - - with patch("asyncio.create_subprocess_shell", return_value=mock_process): - executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) - result = await executor.execute_before_lease_hook(hook_context) - assert result is True - - async def test_hook_exit_code_matching_custom(self, mock_device_factory, hook_context): - """Test that hook succeeds when exit code matches custom expected value.""" - hook_config = HookConfigV1Alpha1( - before_lease=HookInstanceConfigV1Alpha1(script="exit 42", timeout=10, exit_code=42), - ) - - with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: - mock_session = Mock() - mock_session_class.return_value.__enter__.return_value = mock_session - mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") - mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) - - mock_process = AsyncMock() - mock_process.returncode = 42 - mock_process.stdout.readline.side_effect = [b""] - mock_process.wait = AsyncMock(return_value=None) - - with 
patch("asyncio.create_subprocess_shell", return_value=mock_process): - executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) - result = await executor.execute_before_lease_hook(hook_context) - assert result is True - - async def test_hook_exit_code_mismatch_pass(self, mock_device_factory, hook_context): - """Test that hook succeeds when exit code mismatches but on_failure='pass'.""" - hook_config = HookConfigV1Alpha1( - before_lease=HookInstanceConfigV1Alpha1(script="exit 1", timeout=10, exit_code=0, on_failure="pass"), - ) - - with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: - mock_session = Mock() - mock_session_class.return_value.__enter__.return_value = mock_session - mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") - mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) - - mock_process = AsyncMock() - mock_process.returncode = 1 - mock_process.stdout.readline.side_effect = [b""] - mock_process.wait = AsyncMock(return_value=None) - - with ( - patch("asyncio.create_subprocess_shell", return_value=mock_process), - patch("jumpstarter.exporter.hooks.logger") as mock_logger, - ): - executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) - result = await executor.execute_before_lease_hook(hook_context) - assert result is True - # Verify INFO log was created (using format string) - mock_logger.info.assert_any_call( - "%s (on_failure=pass, continuing)", "Hook failed: expected exit code 0, got 1" - ) - - async def test_hook_exit_code_mismatch_warn(self, mock_device_factory, hook_context): - """Test that hook succeeds when exit code mismatches but on_failure='warn'.""" - hook_config = HookConfigV1Alpha1( - before_lease=HookInstanceConfigV1Alpha1(script="exit 1", timeout=10, exit_code=0, on_failure="warn"), - ) - - with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: - mock_session = Mock() - 
mock_session_class.return_value.__enter__.return_value = mock_session - mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") - mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) - - mock_process = AsyncMock() - mock_process.returncode = 1 - mock_process.stdout.readline.side_effect = [b""] - mock_process.wait = AsyncMock(return_value=None) - - with ( - patch("asyncio.create_subprocess_shell", return_value=mock_process), - patch("jumpstarter.exporter.hooks.logger") as mock_logger, - ): - executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) - result = await executor.execute_before_lease_hook(hook_context) - assert result is True - # Verify WARNING log was created (using format string) - mock_logger.warning.assert_any_call( - "%s (on_failure=warn, continuing)", "Hook failed: expected exit code 0, got 1" - ) - - async def test_hook_exit_code_mismatch_block(self, mock_device_factory, hook_context): - """Test that hook raises exception when exit code mismatches and on_failure='block'.""" - hook_config = HookConfigV1Alpha1( - before_lease=HookInstanceConfigV1Alpha1(script="exit 1", timeout=10, exit_code=0, on_failure="block"), - ) - - with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: - mock_session = Mock() - mock_session_class.return_value.__enter__.return_value = mock_session - mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") - mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) - - mock_process = AsyncMock() - mock_process.returncode = 1 - mock_process.stdout.readline.side_effect = [b""] - mock_process.wait = AsyncMock(return_value=None) - - with patch("asyncio.create_subprocess_shell", return_value=mock_process): - executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) - with pytest.raises(HookExecutionError, match="expected exit code 0, 
got 1"): - await executor.execute_before_lease_hook(hook_context) - - async def test_hook_timeout_with_pass(self, mock_device_factory, hook_context): - """Test that hook succeeds when timeout occurs but on_failure='pass'.""" - hook_config = HookConfigV1Alpha1( - before_lease=HookInstanceConfigV1Alpha1(script="sleep 60", timeout=1, on_failure="pass"), - ) - - with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: - mock_session = Mock() - mock_session_class.return_value.__enter__.return_value = mock_session - mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") - mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) - - mock_process = AsyncMock() - mock_process.terminate = AsyncMock(return_value=None) - mock_process.wait = AsyncMock(return_value=None) - - with ( - patch("asyncio.create_subprocess_shell", return_value=mock_process), - patch("asyncio.wait_for", side_effect=asyncio.TimeoutError()), - patch("jumpstarter.exporter.hooks.logger") as mock_logger, - ): - executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) - result = await executor.execute_before_lease_hook(hook_context) - assert result is True - # Verify INFO log was created - assert any("on_failure=pass, continuing" in str(call) for call in mock_logger.info.call_args_list) - - async def test_hook_timeout_with_warn(self, mock_device_factory, hook_context): + async def test_hook_timeout_with_warn(self, mock_device_factory, hook_context) -> None: """Test that hook succeeds when timeout occurs but on_failure='warn'.""" hook_config = HookConfigV1Alpha1( before_lease=HookInstanceConfigV1Alpha1(script="sleep 60", timeout=1, on_failure="warn"), @@ -490,6 +340,6 @@ async def test_hook_timeout_with_warn(self, mock_device_factory, hook_context): ): executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) result = await executor.execute_before_lease_hook(hook_context) - assert 
result is True + assert result is None # Verify WARNING log was created assert any("on_failure=warn, continuing" in str(call) for call in mock_logger.warning.call_args_list) From c480fafd226fec0a0fa18361853cedddf9bd12e3 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Mon, 24 Nov 2025 14:07:09 -0500 Subject: [PATCH 14/30] Improve messaging and typing --- .../jumpstarter/exporter/exporter.py | 49 ++++++------------- 1 file changed, 15 insertions(+), 34 deletions(-) diff --git a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py index 8d9c8cf7..ef7398e9 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py @@ -175,8 +175,12 @@ def stop(self, wait_for_lease_exit=False, should_unregister=False): # Stop immediately if not started yet or if immediate stop is requested if (not self._started or not wait_for_lease_exit) and self._tg is not None: - logger.info("Stopping exporter immediately, unregister from controller=%s", should_unregister) + if should_unregister: + logger.info("Stopping exporter immediately, unregistering from controller") + else: + logger.info("Stopping exporter immediately, will not unregister from controller") self._unregister = should_unregister + # Cancel any ongoing tasks self._tg.cancel_scope.cancel() elif not self._stop_requested: self._stop_requested = True @@ -228,14 +232,7 @@ async def _retry_stream( def _listen_stream_factory( self, lease_name: str ) -> Callable[[jumpstarter_pb2_grpc.ControllerServiceStub], AsyncGenerator[jumpstarter_pb2.ListenResponse, None]]: - """Create a stream factory for listening to connection requests. 
- - Args: - lease_name: The lease name to listen for - - Returns: - A factory function that creates a Listen stream when given a ControllerServiceStub - """ + """Create a stream factory for listening to connection requests.""" def factory( ctrl: jumpstarter_pb2_grpc.ControllerServiceStub, @@ -247,11 +244,7 @@ def factory( def _status_stream_factory( self, ) -> Callable[[jumpstarter_pb2_grpc.ControllerServiceStub], AsyncGenerator[jumpstarter_pb2.StatusResponse, None]]: - """Create a stream factory for status updates. - - Returns: - A factory function that creates a Status stream when given a ControllerServiceStub - """ + """Create a stream factory for status updates.""" def factory( ctrl: jumpstarter_pb2_grpc.ControllerServiceStub, @@ -260,24 +253,10 @@ def factory( return factory - def _create_hook_context(self, lease_name: str, client_name: str) -> HookContext: - """Create a standardized hook context. - - Args: - lease_name: Name of the lease - client_name: Name of the client - - Returns: - HookContext object with consistent fields - """ - return HookContext( - lease_name=lease_name, - client_name=client_name, - ) - async def _register_with_controller(self, channel: grpc.aio.Channel): """Register the exporter with the controller.""" - response = await jumpstarter_pb2_grpc.ExporterServiceStub(channel).GetReport(empty_pb2.Empty()) + exporter_stub = jumpstarter_pb2_grpc.ExporterServiceStub(channel) + response: jumpstarter_pb2.GetReportResponse = await exporter_stub.GetReport(empty_pb2.Empty()) logger.info("Registering exporter with controller") controller = jumpstarter_pb2_grpc.ControllerServiceStub(channel) await controller.Register( @@ -286,7 +265,10 @@ async def _register_with_controller(self, channel: grpc.aio.Channel): reports=response.reports, ) ) + # Mark exporter as registered internally self._registered = True + # Report that exporter is available to the controller + # TODO: Determine if the controller should handle this logic internally await 
self._report_status(ExporterStatus.AVAILABLE, "Exporter registered and available") async def _report_status(self, status: ExporterStatus, message: str = ""): @@ -472,12 +454,11 @@ async def serve(self): # noqa: C901 self._status_stream_factory(), status_tx, ) - # Type: status is jumpstarter_pb2.StatusResponse with lease_name and other status fields async for status in status_rx: if self._lease_name != "" and self._lease_name != status.lease_name: # After-lease hook for the previous lease (lease name changed) if self.hook_executor and self._current_client_name: - hook_context = self._create_hook_context(self._lease_name, self._current_client_name) + hook_context = HookContext(self._lease_name, self._current_client_name) # Shield the after-lease hook from cancellation and await it with CancelScope(shield=True): await self._report_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") @@ -527,7 +508,7 @@ async def serve(self): # noqa: C901 # Before-lease hook when transitioning from unleased to leased if not previous_leased: if self.hook_executor: - hook_context = self._create_hook_context(status.lease_name, status.client_name) + hook_context = HookContext(status.lease_name, status.client_name) tg.start_soon(self.run_before_lease_hook, hook_context) else: # No hook configured, set event immediately @@ -539,7 +520,7 @@ async def serve(self): # noqa: C901 # After-lease hook when transitioning from leased to unleased if previous_leased and self.hook_executor and self._current_client_name: - hook_context = self._create_hook_context(self._lease_name, self._current_client_name) + hook_context = HookContext(self._lease_name, self._current_client_name) # Shield the after-lease hook from cancellation and await it with CancelScope(shield=True): await self._report_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") From 079d39ba172d5d44c191d3e4ae2193ee2016394f Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Tue, 25 Nov 2025 14:17:42 -0500 Subject: 
[PATCH 15/30] Finish refactoring the Exporter class and improve hooks handling --- .../jumpstarter/jumpstarter/common/utils.py | 9 +- .../jumpstarter/config/exporter.py | 8 +- .../jumpstarter/exporter/exporter.py | 286 ++++-------- .../jumpstarter/jumpstarter/exporter/hooks.py | 358 +++++++++----- .../jumpstarter/exporter/hooks_test.py | 442 ++++++++---------- .../jumpstarter/exporter/lease_context.py | 63 +++ .../jumpstarter/exporter/session.py | 10 +- 7 files changed, 626 insertions(+), 550 deletions(-) create mode 100644 python/packages/jumpstarter/jumpstarter/exporter/lease_context.py diff --git a/python/packages/jumpstarter/jumpstarter/common/utils.py b/python/packages/jumpstarter/jumpstarter/common/utils.py index dac73cad..6a0fa8f1 100644 --- a/python/packages/jumpstarter/jumpstarter/common/utils.py +++ b/python/packages/jumpstarter/jumpstarter/common/utils.py @@ -5,20 +5,23 @@ from datetime import timedelta from functools import partial from subprocess import Popen +from typing import TYPE_CHECKING from anyio.from_thread import BlockingPortal, start_blocking_portal from jumpstarter.client import client_from_path from jumpstarter.config.env import JMP_DRIVERS_ALLOW, JUMPSTARTER_HOST -from jumpstarter.driver import Driver from jumpstarter.exporter import Session from jumpstarter.utils.env import env +if TYPE_CHECKING: + from jumpstarter.driver import Driver + __all__ = ["env"] @asynccontextmanager -async def serve_async(root_device: Driver, portal: BlockingPortal, stack: ExitStack): +async def serve_async(root_device: "Driver", portal: BlockingPortal, stack: ExitStack): with Session(root_device=root_device) as session: async with session.serve_unix_async() as path: # SAFETY: the root_device instance is constructed locally thus considered trusted @@ -31,7 +34,7 @@ async def serve_async(root_device: Driver, portal: BlockingPortal, stack: ExitSt @contextmanager -def serve(root_device: Driver): +def serve(root_device: "Driver"): with start_blocking_portal() as 
portal: with ExitStack() as stack: with portal.wrap_async_context_manager(serve_async(root_device, portal, stack)) as client: diff --git a/python/packages/jumpstarter/jumpstarter/config/exporter.py b/python/packages/jumpstarter/jumpstarter/config/exporter.py index 11f94f1d..b4998caf 100644 --- a/python/packages/jumpstarter/jumpstarter/config/exporter.py +++ b/python/packages/jumpstarter/jumpstarter/config/exporter.py @@ -2,7 +2,7 @@ from contextlib import asynccontextmanager, contextmanager, suppress from pathlib import Path -from typing import Any, ClassVar, Literal, Optional, Self +from typing import TYPE_CHECKING, Any, ClassVar, Literal, Optional, Self import grpc import yaml @@ -15,7 +15,9 @@ from jumpstarter.common.exceptions import ConfigurationError from jumpstarter.common.grpc import aio_secure_channel, ssl_channel_credentials from jumpstarter.common.importlib import import_class -from jumpstarter.driver import Driver + +if TYPE_CHECKING: + from jumpstarter.driver import Driver class HookInstanceConfigV1Alpha1(BaseModel): @@ -71,7 +73,7 @@ class ExporterConfigV1Alpha1DriverInstance(RootModel): | ExporterConfigV1Alpha1DriverInstanceProxy ) - def instantiate(self) -> Driver: + def instantiate(self) -> "Driver": match self.root: case ExporterConfigV1Alpha1DriverInstanceBase(): driver_class = import_class(self.root.type, [], True) diff --git a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py index ef7398e9..1efc2dff 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py @@ -2,7 +2,7 @@ from collections.abc import AsyncGenerator, Awaitable, Callable from contextlib import asynccontextmanager from dataclasses import dataclass, field -from typing import Any, Self +from typing import TYPE_CHECKING, Any, Self import grpc from anyio import ( @@ -25,10 +25,13 @@ from jumpstarter.common import ExporterStatus, 
Metadata from jumpstarter.common.streams import connect_router_stream from jumpstarter.config.tls import TLSConfigV1Alpha1 -from jumpstarter.driver import Driver -from jumpstarter.exporter.hooks import HookContext, HookExecutionError, HookExecutor +from jumpstarter.exporter.hooks import HookExecutor +from jumpstarter.exporter.lease_context import LeaseContext from jumpstarter.exporter.session import Session +if TYPE_CHECKING: + from jumpstarter.driver import Driver + logger = logging.getLogger(__name__) @@ -51,7 +54,7 @@ class Exporter(AsyncContextManagerMixin, Metadata): Used when creating controller stubs, unregistering, and establishing streams. """ - device_factory: Callable[[], Driver] + device_factory: Callable[[], "Driver"] """Factory function for creating Driver instances representing the hardware/devices. Called when creating Sessions to provide access to the underlying device. @@ -86,13 +89,6 @@ class Exporter(AsyncContextManagerMixin, Metadata): # Internal State Fields - _lease_name: str = field(init=False, default="") - """Current lease name assigned by the controller. - - Empty string indicates no active lease. Updated when controller assigns/reassigns - the exporter. Used to detect lease transitions and create hook contexts. - """ - _registered: bool = field(init=False, default=False) """Tracks whether exporter has successfully registered with the controller. @@ -128,20 +124,6 @@ class Exporter(AsyncContextManagerMixin, Metadata): when stopping. Set during serve() and cleared when done. """ - _current_client_name: str = field(init=False, default="") - """Name of the client currently holding the lease. - - Used to create hook contexts with client information and determine if - after-lease hooks should run. Reset when lease is released. - """ - - _before_lease_hook: Event | None = field(init=False, default=None) - """Synchronization event that blocks connection handling until hook completes. 
- - Created when a new lease starts, waited on before accepting connections, - and set when hook completes or is not configured. - """ - _exporter_status: ExporterStatus = field(init=False, default=ExporterStatus.OFFLINE) """Current status of the exporter. @@ -150,19 +132,22 @@ class Exporter(AsyncContextManagerMixin, Metadata): AFTER_LEASE_HOOK, BEFORE_LEASE_HOOK_FAILED, AFTER_LEASE_HOOK_FAILED. """ - _current_session: Session | None = field(init=False, default=None) - """Reference to the currently active Session object. + _lease_scope: LeaseContext | None = field(init=False, default=None) + """Encapsulates all resources associated with the current lease. - A Session wraps the root device and provides gRPC service endpoints. - Used to update session status and pass to HookExecutor for logging. - Set in session() context manager and cleared when context exits. - """ + Contains the session, socket path, and synchronization event needed + throughout the lease lifecycle. This replaces the previous individual + _current_session, _session_socket_path, and _before_lease_hook fields. - _session_socket_path: str | None = field(init=False, default=None) - """Unix socket path where the current session is serving. + Lifecycle: + 1. Created in serve() when a lease is assigned (session/socket initially None) + 2. Populated in handle_lease() when the session is created + 3. Accessed by hook execution methods and status reporting + 4. Cleared when lease ends or changes - Passed to hooks so they can communicate with the device via the CLI. - Enables session reuse instead of creating new ones for hooks. + The session and socket are managed by the context manager in handle_lease(), + ensuring proper cleanup when the lease ends. The LeaseScope itself is just + a reference holder and doesn't manage resource lifecycles directly. 
""" def stop(self, wait_for_lease_exit=False, should_unregister=False): @@ -276,8 +261,8 @@ async def _report_status(self, status: ExporterStatus, message: str = ""): self._exporter_status = status # Update session status if available - if self._current_session: - self._current_session.update_status(status, message) + if self._lease_scope and self._lease_scope.session: + self._lease_scope.session.update_status(status, message) try: controller = await self._get_controller_stub() @@ -356,29 +341,28 @@ async def _handle_client_conn( @asynccontextmanager async def session(self): - """Create and manage an exporter Session context.""" + """Create and manage an exporter Session context. + + Yields: + tuple[Session, str]: A tuple of (session, socket_path) for use in lease handling. + """ with Session( uuid=self.uuid, labels=self.labels, root_device=self.device_factory(), ) as session: - # Store session reference outside context for status updates - self._current_session = session - try: - # Create a Unix socket - async with session.serve_unix_async() as path: - # Create a gRPC channel to the controller via the socket - async with grpc.aio.secure_channel( - f"unix://{path}", grpc.local_channel_credentials(grpc.LocalConnectionType.UDS) - ) as channel: - # Register the exporter with the controller - await self._register_with_controller(channel) - yield path - finally: - # Clear the session reference - self._current_session = None - - async def handle_lease(self, lease_name: str, tg: TaskGroup) -> None: + # Create a Unix socket + async with session.serve_unix_async() as path: + # Create a gRPC channel to the controller via the socket + async with grpc.aio.secure_channel( + f"unix://{path}", grpc.local_channel_credentials(grpc.LocalConnectionType.UDS) + ) as channel: + # Register the exporter with the controller + await self._register_with_controller(channel) + # Yield both session and path for creating LeaseScope + yield session, path + + async def handle_lease(self, 
lease_name: str, tg: TaskGroup, lease_scope: LeaseContext) -> None: """Handle all incoming client connections for a lease. This method orchestrates the complete lifecycle of managing connections during @@ -386,14 +370,16 @@ async def handle_lease(self, lease_name: str, tg: TaskGroup) -> None: tasks to handle each client connection. The method performs the following steps: - 1. Sets up a stream to listen for incoming connection requests - 2. Creates a session with a Unix socket for device access - 3. Waits for the before-lease hook to complete (if configured) - 4. Spawns a new task for each incoming connection request + 1. Creates a session for the lease duration + 2. Populates the lease_scope with session and socket path + 3. Sets up a stream to listen for incoming connection requests + 4. Waits for the before-lease hook to complete (if configured) + 5. Spawns a new task for each incoming connection request Args: lease_name: Name of the lease to handle connections for tg: TaskGroup for spawning concurrent connection handler tasks + lease_scope: LeaseScope with before_lease_hook event (session/socket set here) Note: This method runs for the entire duration of the lease and is spawned by @@ -412,16 +398,16 @@ async def handle_lease(self, lease_name: str, tg: TaskGroup) -> None: listen_tx, ) - # Create a lease session to execute hooks and handle connections - async with self.session() as path: - # Store socket path for hook execution - self._session_socket_path = path + # Create session for the lease duration and populate lease_scope + async with self.session() as (session, path): + # Populate the lease scope with session and socket path + lease_scope.session = session + lease_scope.socket_path = path # Wait for before-lease hook to complete before processing client connections - if self._before_lease_hook is not None: - logger.info("Waiting for before-lease hook to complete before accepting connections") - await self._before_lease_hook.wait() - 
logger.info("Before-lease hook completed, now accepting connections") + logger.info("Waiting for before-lease hook to complete before accepting connections") + await lease_scope.before_lease_hook.wait() + logger.info("Before-lease hook completed, now accepting connections") # Process client connections # Type: request is jumpstarter_pb2.ListenResponse with router_endpoint and router_token fields @@ -429,7 +415,7 @@ async def handle_lease(self, lease_name: str, tg: TaskGroup) -> None: logger.info("Handling new connection request on lease %s", lease_name) tg.start_soon( self._handle_client_conn, - path, + lease_scope.socket_path, request.router_endpoint, request.router_token, self.tls, @@ -455,38 +441,25 @@ async def serve(self): # noqa: C901 status_tx, ) async for status in status_rx: - if self._lease_name != "" and self._lease_name != status.lease_name: + # Check if lease name changed (and there was a previous active lease) + lease_changed = ( + self._lease_scope + and self._lease_scope.is_active() + and self._lease_scope.lease_name != status.lease_name + ) + if lease_changed: # After-lease hook for the previous lease (lease name changed) - if self.hook_executor and self._current_client_name: - hook_context = HookContext(self._lease_name, self._current_client_name) - # Shield the after-lease hook from cancellation and await it + if self.hook_executor and self._lease_scope.has_client(): with CancelScope(shield=True): - await self._report_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") - self.hook_executor.main_session = self._current_session - try: - await self.hook_executor.execute_after_lease_hook( - hook_context, socket_path=self._session_socket_path - ) - await self._report_status(ExporterStatus.AVAILABLE, "Available for new lease") - except HookExecutionError as e: - logger.error("afterLease hook failed (on_failure=endLease/exit): %s", e) - await self._report_status( - ExporterStatus.AFTER_LEASE_HOOK_FAILED, - f"afterLease hook failed: {e}", 
- ) - logger.error("Shutting down exporter due to afterLease hook failure") - self.stop() - except Exception as e: - logger.error("afterLease hook failed with unexpected error: %s", e, exc_info=True) - await self._report_status( - ExporterStatus.AFTER_LEASE_HOOK_FAILED, - f"afterLease hook failed: {e}", - ) - - self._lease_name = status.lease_name + await self.hook_executor.run_after_lease_hook( + self._lease_scope, + self._report_status, + self.stop, + ) + logger.info("Lease status changed, killing existing connections") - # Reset event for next lease - self._before_lease_hook = None + # Clear lease scope for next lease + self._lease_scope = None self.stop() break @@ -494,47 +467,52 @@ async def serve(self): # noqa: C901 previous_leased = hasattr(self, "_previous_leased") and self._previous_leased current_leased = status.leased - self._lease_name = status.lease_name - if not self._started and self._lease_name != "": + # Check if this is a new lease assignment (first time or lease name changed) + if not self._started and status.lease_name != "": self._started = True - # Create event for pre-lease synchronization - self._before_lease_hook = Event() - tg.start_soon(self.handle_lease, self._lease_name, tg) + # Create lease scope and start handling the lease + # The session will be created inside handle_lease and stay open for the lease duration + lease_scope = LeaseContext( + lease_name=status.lease_name, + before_lease_hook=Event(), + ) + self._lease_scope = lease_scope + tg.start_soon(self.handle_lease, status.lease_name, tg, lease_scope) if current_leased: logger.info("Currently leased by %s under %s", status.client_name, status.lease_name) - self._current_client_name = status.client_name + if self._lease_scope: + self._lease_scope.update_client(status.client_name) # Before-lease hook when transitioning from unleased to leased if not previous_leased: - if self.hook_executor: - hook_context = HookContext(status.lease_name, status.client_name) - 
tg.start_soon(self.run_before_lease_hook, hook_context) + if self.hook_executor and self._lease_scope: + tg.start_soon( + self.hook_executor.run_before_lease_hook, + self._lease_scope, + self._report_status, + self.stop, # Pass shutdown callback + ) else: # No hook configured, set event immediately await self._report_status(ExporterStatus.LEASE_READY, "Ready for commands") - if self._before_lease_hook: - self._before_lease_hook.set() + if self._lease_scope: + self._lease_scope.before_lease_hook.set() else: logger.info("Currently not leased") # After-lease hook when transitioning from leased to unleased - if previous_leased and self.hook_executor and self._current_client_name: - hook_context = HookContext(self._lease_name, self._current_client_name) - # Shield the after-lease hook from cancellation and await it + if previous_leased and self.hook_executor and self._lease_scope and self._lease_scope.has_client(): + # Shield the after-lease hook from cancellation with CancelScope(shield=True): - await self._report_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") - # Pass the current session to hook executor for logging - self.hook_executor.main_session = self._current_session - # Use session socket if available, otherwise create new session - await self.hook_executor.execute_after_lease_hook( - hook_context, socket_path=self._session_socket_path + await self.hook_executor.run_after_lease_hook( + self._lease_scope, + self._report_status, + self.stop, ) - await self._report_status(ExporterStatus.AVAILABLE, "Available for new lease") - self._current_client_name = "" - # Reset event for next lease - self._before_lease_hook = None + # Clear lease scope for next lease + self._lease_scope = None if self._stop_requested: self.stop(should_unregister=True) @@ -542,69 +520,3 @@ async def serve(self): # noqa: C901 self._previous_leased = current_leased self._tg = None - - async def run_before_lease_hook(self, hook_ctx: HookContext): - """ - Execute the 
before-lease hook for the current exporter session. - - Args: - hook_ctx (HookContext): The current hook execution context - """ - try: - await self._report_status(ExporterStatus.BEFORE_LEASE_HOOK, "Running beforeLease hooks") - # Pass the current session to hook executor for logging - self.hook_executor.main_session = self._current_session - - # Wait for socket path to be available - while self._session_socket_path is None: - await sleep(0.1) - - # Execute hook with main session socket - await self.hook_executor.execute_before_lease_hook(hook_ctx, socket_path=self._session_socket_path) - await self._report_status(ExporterStatus.LEASE_READY, "Ready for commands") - logger.info("beforeLease hook completed successfully") - except HookExecutionError as e: - # Hook failed with on_failure='block' - end lease and set failed status - logger.error("beforeLease hook failed (on_failure=block): %s", e) - await self._report_status( - ExporterStatus.BEFORE_LEASE_HOOK_FAILED, f"beforeLease hook failed (on_failure=block): {e}" - ) - # Note: We don't take the exporter offline for before_lease hook failures - # The lease is simply not ready, and the exporter remains available for future leases - except Exception as e: - # Unexpected error during hook execution - logger.error("beforeLease hook failed with unexpected error: %s", e, exc_info=True) - await self._report_status(ExporterStatus.BEFORE_LEASE_HOOK_FAILED, f"beforeLease hook failed: {e}") - finally: - # Always set the event to unblock connections - if self._before_lease_hook: - self._before_lease_hook.set() - - async def run_after_lease_hook(self, hook_ctx: HookContext): - """ - Execute the after-lease hook for the current exporter session. 
- - Args: - hook_ctx (HookContext): The current hook execution context - """ - try: - await self._report_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") - # Pass the current session to hook executor for logging - self.hook_executor.main_session = self._current_session - # Use session socket if available, otherwise create new session - await self.hook_executor.execute_after_lease_hook(hook_ctx, socket_path=self._session_socket_path) - await self._report_status(ExporterStatus.AVAILABLE, "Available for new lease") - logger.info("afterLease hook completed successfully") - except HookExecutionError as e: - # Hook failed with on_failure='block' - set failed status and shut down exporter - logger.error("afterLease hook failed (on_failure=block): %s", e) - await self._report_status( - ExporterStatus.AFTER_LEASE_HOOK_FAILED, f"afterLease hook failed (on_failure=block): {e}" - ) - # Shut down the exporter after after_lease hook failure with on_failure='block' - logger.error("Shutting down exporter due to afterLease hook failure") - self.stop() - except Exception as e: - # Unexpected error during hook execution - logger.error("afterLease hook failed with unexpected error: %s", e, exc_info=True) - await self._report_status(ExporterStatus.AFTER_LEASE_HOOK_FAILED, f"afterLease hook failed: {e}") diff --git a/python/packages/jumpstarter/jumpstarter/exporter/hooks.py b/python/packages/jumpstarter/jumpstarter/exporter/hooks.py index a3827560..3a76fda7 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/hooks.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/hooks.py @@ -3,35 +3,47 @@ import asyncio import logging import os -from contextlib import asynccontextmanager -from dataclasses import dataclass, field -from typing import AsyncGenerator, Callable +from collections.abc import Awaitable +from dataclasses import dataclass +from typing import TYPE_CHECKING, Callable, Literal -from jumpstarter.common import LogSource +from jumpstarter.common 
import ExporterStatus, LogSource from jumpstarter.config.env import JMP_DRIVERS_ALLOW, JUMPSTARTER_HOST from jumpstarter.config.exporter import HookConfigV1Alpha1, HookInstanceConfigV1Alpha1 -from jumpstarter.driver import Driver from jumpstarter.exporter.logging import get_logger from jumpstarter.exporter.session import Session +if TYPE_CHECKING: + from jumpstarter.driver import Driver + from jumpstarter.exporter.lease_context import LeaseContext + logger = logging.getLogger(__name__) +@dataclass class HookExecutionError(Exception): - """Raised when a hook fails and on_failure is set to 'endLease' or 'exit'.""" + """Raised when a hook fails and on_failure is set to 'endLease' or 'exit'. - pass + Attributes: + message: Error message describing the failure + on_failure: The on_failure mode that triggered this error ('endLease' or 'exit') + hook_type: The type of hook that failed ('before_lease' or 'after_lease') + """ + message: str + on_failure: Literal["endLease", "exit"] + hook_type: Literal["before_lease", "after_lease"] -@dataclass(kw_only=True) -class HookContext: - """Context information passed to hooks.""" + def __str__(self) -> str: + return self.message - lease_name: str - client_name: str = "" - lease_duration: str = "" - exporter_name: str = "" - exporter_namespace: str = "" + def should_shutdown_exporter(self) -> bool: + """Returns True if the exporter should be shut down entirely.""" + return self.on_failure == "exit" + + def should_end_lease(self) -> bool: + """Returns True if the lease should be ended.""" + return self.on_failure in ("endLease", "exit") @dataclass(kw_only=True) @@ -39,15 +51,13 @@ class HookExecutor: """Executes lifecycle hooks with access to the j CLI.""" config: HookConfigV1Alpha1 - device_factory: Callable[[], Driver] - main_session: Session | None = field(default=None) + device_factory: Callable[[], "Driver"] - def _create_hook_env(self, context: HookContext, socket_path: str) -> dict[str, str]: + def _create_hook_env(self, 
lease_scope: "LeaseContext") -> dict[str, str]: """Create standardized hook environment variables. Args: - context: Hook context information - socket_path: Path to the Unix socket for JUMPSTARTER_HOST + lease_scope: LeaseScope containing lease metadata and socket path Returns: Dictionary of environment variables for hook execution @@ -55,51 +65,26 @@ def _create_hook_env(self, context: HookContext, socket_path: str) -> dict[str, hook_env = os.environ.copy() hook_env.update( { - JUMPSTARTER_HOST: str(socket_path), + JUMPSTARTER_HOST: str(lease_scope.socket_path), JMP_DRIVERS_ALLOW: "UNSAFE", # Allow all drivers for local access - "LEASE_NAME": context.lease_name, - "CLIENT_NAME": context.client_name, - "LEASE_DURATION": context.lease_duration, - "EXPORTER_NAME": context.exporter_name, - "EXPORTER_NAMESPACE": context.exporter_namespace, + "LEASE_NAME": lease_scope.lease_name, + "CLIENT_NAME": lease_scope.client_name, } ) return hook_env - @asynccontextmanager - async def _create_hook_environment( - self, context: HookContext - ) -> AsyncGenerator[tuple[Session, dict[str, str]], None]: - """Create a local session and Unix socket for j CLI access.""" - with Session( - root_device=self.device_factory(), - # Use hook context for metadata - labels={ - "jumpstarter.dev/hook-context": "true", - "jumpstarter.dev/lease": context.lease_name, - }, - ) as session: - async with session.serve_unix_async() as unix_path: - # Create environment variables for the hook - hook_env = self._create_hook_env(context, unix_path) - - yield session, hook_env - async def _execute_hook( self, hook_config: HookInstanceConfigV1Alpha1, - context: HookContext, + lease_scope: "LeaseContext", log_source: LogSource, - socket_path: str | None = None, ) -> None: """Execute a single hook command. 
Args: hook_config: Hook configuration including script, timeout, and on_failure - context: Hook context information + lease_scope: LeaseScope containing lease metadata and session log_source: Log source for hook output - socket_path: Optional Unix socket path to reuse existing session. - If provided, hooks will access the main session instead of creating their own. """ command = hook_config.script if not command or not command.strip(): @@ -108,37 +93,70 @@ async def _execute_hook( logger.info("Executing hook: %s", command.strip().split("\n")[0][:100]) - # If socket_path provided, use existing session; otherwise create new one - if socket_path is not None: - # Reuse existing session - create environment without session creation - hook_env = self._create_hook_env(context, socket_path) + # Determine hook type from log source + hook_type = "before_lease" if log_source == LogSource.BEFORE_LEASE_HOOK else "after_lease" - # Use main session for logging (must be available when socket_path is provided) - logging_session = self.main_session - if logging_session is None: - raise ValueError("main_session must be set when reusing socket_path") + # Use existing session from lease_scope + hook_env = self._create_hook_env(lease_scope) + + return await self._execute_hook_process( + hook_config, lease_scope, log_source, hook_env, lease_scope.session, hook_type + ) + + def _handle_hook_failure( + self, + error_msg: str, + on_failure: Literal["warn", "endLease", "exit"], + hook_type: Literal["before_lease", "after_lease"], + cause: Exception | None = None, + ) -> None: + """Handle hook failure according to on_failure setting. 
+ + Args: + error_msg: Error message describing the failure + on_failure: The on_failure mode ('warn', 'endLease', or 'exit') + hook_type: The type of hook that failed + cause: Optional exception that caused the failure + + Raises: + HookExecutionError: If on_failure is 'endLease' or 'exit' + """ + if on_failure == "warn": + logger.warning("%s (on_failure=warn, continuing)", error_msg) + return - return await self._execute_hook_process(hook_config, context, log_source, hook_env, logging_session) + logger.error("%s (on_failure=%s, raising exception)", error_msg, on_failure) + + error = HookExecutionError( + message=error_msg, + on_failure=on_failure, + hook_type=hook_type, + ) + + # Properly handle exception chaining + if cause is not None: + raise error from cause else: - # Create new session for hook execution (fallback/standalone mode) - async with self._create_hook_environment(context) as (session, hook_env): - # Determine which session to use for logging - logging_session = self.main_session if self.main_session is not None else session - return await self._execute_hook_process(hook_config, context, log_source, hook_env, logging_session) + raise error async def _execute_hook_process( self, hook_config: HookInstanceConfigV1Alpha1, - context: HookContext, + lease_scope: "LeaseContext", log_source: LogSource, hook_env: dict[str, str], logging_session: Session, + hook_type: Literal["before_lease", "after_lease"], ) -> None: """Execute the hook process with the given environment and logging session.""" command = hook_config.script timeout = hook_config.timeout on_failure = hook_config.on_failure + # Exception handling + error_msg: str | None = None + cause: Exception | None = None + try: # Execute the hook command using shell process = await asyncio.create_subprocess_shell( @@ -150,7 +168,7 @@ async def _execute_hook_process( try: # Create a logger with automatic source registration - hook_logger = get_logger(f"hook.{context.lease_name}", log_source, 
logging_session) + hook_logger = get_logger(f"hook.{lease_scope.lease_name}", log_source, logging_session) # Stream output line-by-line for real-time logging output_lines = [] @@ -172,61 +190,37 @@ async def read_output(): if process.returncode == 0: logger.info("Hook executed successfully") return - else: - # Non-zero exit code is a failure - handle according to on_failure setting - error_msg = f"Hook failed with exit code {process.returncode}" - - if on_failure == "warn": - logger.warning("%s (on_failure=warn, continuing)", error_msg) - return - elif on_failure == "endLease": - logger.error("%s (on_failure=endLease, raising exception)", error_msg) - raise HookExecutionError(error_msg) - else: # on_failure == "exit" - logger.error("%s (on_failure=exit, raising exception)", error_msg) - raise HookExecutionError(error_msg) + + # Non-zero exit code is a failure + error_msg = f"Hook failed with exit code {process.returncode}" except asyncio.TimeoutError as e: error_msg = f"Hook timed out after {timeout} seconds" + cause = e logger.error(error_msg) try: + # Attempt to gracefully terminate the process process.terminate() await asyncio.wait_for(process.wait(), timeout=5) except asyncio.TimeoutError: + # Force kill if it didn't terminate in time process.kill() await process.wait() - # Handle timeout according to on_failure setting - if on_failure == "warn": - logger.warning("%s (on_failure=warn, continuing)", error_msg) - return - elif on_failure == "endLease": - raise HookExecutionError(error_msg) from e - else: # on_failure == "exit" - raise HookExecutionError(error_msg) from e - - except HookExecutionError: - # Re-raise HookExecutionError to propagate to exporter - raise except Exception as e: error_msg = f"Error executing hook: {e}" + cause = e logger.error(error_msg, exc_info=True) - # Handle exception according to on_failure setting - if on_failure == "warn": - logger.warning("%s (on_failure=warn, continuing)", error_msg) - return - elif on_failure == "endLease": - 
raise HookExecutionError(error_msg) from e - else: # on_failure == "exit" - raise HookExecutionError(error_msg) from e + # Handle failure if one occurred + if error_msg is not None: + self._handle_hook_failure(error_msg, on_failure, hook_type, cause) - async def execute_before_lease_hook(self, context: HookContext, socket_path: str | None = None) -> None: + async def execute_before_lease_hook(self, lease_scope: "LeaseContext") -> None: """Execute the before-lease hook. Args: - context: Hook context information - socket_path: Optional Unix socket path to reuse existing session + lease_scope: LeaseScope with lease metadata and session Raises: HookExecutionError: If hook fails and on_failure is set to 'endLease' or 'exit' @@ -235,20 +229,18 @@ async def execute_before_lease_hook(self, context: HookContext, socket_path: str logger.debug("No before-lease hook configured") return - logger.info("Executing before-lease hook for lease %s", context.lease_name) + logger.info("Executing before-lease hook for lease %s", lease_scope.lease_name) await self._execute_hook( self.config.before_lease, - context, + lease_scope, LogSource.BEFORE_LEASE_HOOK, - socket_path, ) - async def execute_after_lease_hook(self, context: HookContext, socket_path: str | None = None) -> None: + async def execute_after_lease_hook(self, lease_scope: "LeaseContext") -> None: """Execute the after-lease hook. 
Args: - context: Hook context information - socket_path: Optional Unix socket path to reuse existing session + lease_scope: LeaseScope with lease metadata and session Raises: HookExecutionError: If hook fails and on_failure is set to 'endLease' or 'exit' @@ -257,10 +249,154 @@ async def execute_after_lease_hook(self, context: HookContext, socket_path: str logger.debug("No after-lease hook configured") return - logger.info("Executing after-lease hook for lease %s", context.lease_name) + logger.info("Executing after-lease hook for lease %s", lease_scope.lease_name) await self._execute_hook( self.config.after_lease, - context, + lease_scope, LogSource.AFTER_LEASE_HOOK, - socket_path, ) + + async def run_before_lease_hook( + self, + lease_scope: "LeaseContext", + report_status: Callable[["ExporterStatus", str], Awaitable[None]], + shutdown: Callable[[], None], + ) -> None: + """Execute before-lease hook with full orchestration. + + This method handles the complete lifecycle of running a before-lease hook: + - Waits for the lease scope to be ready (session/socket populated) + - Reports status changes via the provided callback + - Sets up the hook executor with the session for logging + - Executes the hook and handles errors + - Always signals the before_lease_hook event to unblock connections + + Args: + lease_scope: LeaseScope containing session, socket_path, and sync event + report_status: Async callback to report status changes to controller + shutdown: Callback to trigger exporter shutdown on critical failures + """ + try: + # Wait for lease scope to be fully populated by handle_lease + assert lease_scope.is_ready(), "LeaseScope must be fully initialized before running before-lease hooks" + + # Check if hook is configured + if not self.config.before_lease: + logger.debug("No before-lease hook configured") + await report_status(ExporterStatus.LEASE_READY, "Ready for commands") + return + + await report_status(ExporterStatus.BEFORE_LEASE_HOOK, "Running beforeLease 
hook") + + # Execute hook with lease scope + logger.info("Executing before-lease hook for lease %s", lease_scope.lease_name) + await self._execute_hook( + self.config.before_lease, + lease_scope, + LogSource.BEFORE_LEASE_HOOK, + ) + + await report_status(ExporterStatus.LEASE_READY, "Ready for commands") + logger.info("beforeLease hook completed successfully") + + except HookExecutionError as e: + if e.should_shutdown_exporter(): + # on_failure='exit' - shut down the entire exporter + logger.error("beforeLease hook failed with on_failure='exit': %s", e) + await report_status( + ExporterStatus.BEFORE_LEASE_HOOK_FAILED, + f"beforeLease hook failed (on_failure=exit, shutting down): {e}", + ) + logger.error("Shutting down exporter due to beforeLease hook failure with on_failure='exit'") + shutdown() + else: + # on_failure='endLease' - just block this lease, exporter stays available + logger.error("beforeLease hook failed with on_failure='endLease': %s", e) + await report_status( + ExporterStatus.BEFORE_LEASE_HOOK_FAILED, + f"beforeLease hook failed (on_failure=endLease): {e}", + ) + # TODO: We need to implement a controller-side mechanism to end the lease here + + except Exception as e: + logger.error("beforeLease hook failed with unexpected error: %s", e, exc_info=True) + await report_status( + ExporterStatus.BEFORE_LEASE_HOOK_FAILED, + f"beforeLease hook failed: {e}", + ) + # Unexpected errors don't trigger shutdown - just block the lease + + finally: + # Always set the event to unblock connections + lease_scope.before_lease_hook.set() + + async def run_after_lease_hook( + self, + lease_scope: "LeaseContext", + report_status: Callable[["ExporterStatus", str], Awaitable[None]], + shutdown: Callable[[], None], + ) -> None: + """Execute after-lease hook with full orchestration. 
+ + This method handles the complete lifecycle of running an after-lease hook: + - Validates that the lease scope is ready + - Reports status changes via the provided callback + - Sets up the hook executor with the session for logging + - Executes the hook and handles errors + - Triggers shutdown on critical failures (HookExecutionError) + + Args: + lease_scope: LeaseScope containing session, socket_path, and client info + report_status: Async callback to report status changes to controller + shutdown: Callback to trigger exporter shutdown on critical failures + """ + try: + # Verify lease scope is ready + assert lease_scope.is_ready(), "LeaseScope must be fully initialized before running after-lease hooks" + + # Check if hook is configured + if not self.config.after_lease: + logger.debug("No after-lease hook configured") + await report_status(ExporterStatus.AVAILABLE, "Available for new lease") + return + + await report_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") + + # Execute hook with lease scope + logger.info("Executing after-lease hook for lease %s", lease_scope.lease_name) + await self._execute_hook( + self.config.after_lease, + lease_scope, + LogSource.AFTER_LEASE_HOOK, + ) + + await report_status(ExporterStatus.AVAILABLE, "Available for new lease") + logger.info("afterLease hook completed successfully") + + except HookExecutionError as e: + if e.should_shutdown_exporter(): + # on_failure='exit' - shut down the entire exporter + logger.error("afterLease hook failed with on_failure='exit': %s", e) + await report_status( + ExporterStatus.AFTER_LEASE_HOOK_FAILED, + f"afterLease hook failed (on_failure=exit, shutting down): {e}", + ) + logger.error("Shutting down exporter due to afterLease hook failure with on_failure='exit'") + shutdown() + else: + # on_failure='endLease' - lease already ended, just report the failure + # The exporter remains available for new leases + logger.error("afterLease hook failed with on_failure='endLease': %s", 
e) + await report_status( + ExporterStatus.AFTER_LEASE_HOOK_FAILED, + f"afterLease hook failed (on_failure=endLease): {e}", + ) + # Note: Lease has already ended - no shutdown needed, exporter remains available + + except Exception as e: + logger.error("afterLease hook failed with unexpected error: %s", e, exc_info=True) + await report_status( + ExporterStatus.AFTER_LEASE_HOOK_FAILED, + f"afterLease hook failed: {e}", + ) + # Unexpected errors don't trigger shutdown - exporter remains available diff --git a/python/packages/jumpstarter/jumpstarter/exporter/hooks_test.py b/python/packages/jumpstarter/jumpstarter/exporter/hooks_test.py index f31257df..d39a6ecc 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/hooks_test.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/hooks_test.py @@ -7,7 +7,7 @@ from jumpstarter.config.env import JMP_DRIVERS_ALLOW, JUMPSTARTER_HOST from jumpstarter.config.exporter import HookConfigV1Alpha1, HookInstanceConfigV1Alpha1 from jumpstarter.driver import Driver -from jumpstarter.exporter.hooks import HookContext, HookExecutionError, HookExecutor +from jumpstarter.exporter.hooks import HookExecutionError, HookExecutor pytestmark = pytest.mark.anyio @@ -41,14 +41,21 @@ def hook_config() -> HookConfigV1Alpha1: @pytest.fixture -def hook_context() -> HookContext: - return HookContext( +def lease_scope(): + from anyio import Event + + from jumpstarter.exporter.lease_context import LeaseContext + + lease_scope = LeaseContext( lease_name="test-lease-123", + before_lease_hook=Event(), client_name="test-client", - lease_duration="30m", - exporter_name="test-exporter", - exporter_namespace="default", ) + # Add mock session to lease_scope + mock_session = Mock() + lease_scope.session = mock_session + lease_scope.socket_path = "/tmp/test_socket" + return lease_scope class TestHookExecutor: @@ -61,7 +68,7 @@ async def test_hook_executor_creation(self, hook_config, mock_device_factory) -> assert executor.config == hook_config assert 
executor.device_factory == mock_device_factory - async def test_empty_hook_execution(self, mock_device_factory, hook_context) -> None: + async def test_empty_hook_execution(self, mock_device_factory, lease_scope) -> None: empty_config = HookConfigV1Alpha1() executor = HookExecutor( config=empty_config, @@ -69,277 +76,228 @@ async def test_empty_hook_execution(self, mock_device_factory, hook_context) -> ) # Both hooks should return None for empty/None commands - assert await executor.execute_before_lease_hook(hook_context) is None - assert await executor.execute_after_lease_hook(hook_context) is None + assert await executor.execute_before_lease_hook(lease_scope) is None + assert await executor.execute_after_lease_hook(lease_scope) is None - async def test_successful_hook_execution(self, mock_device_factory, hook_context) -> None: + async def test_successful_hook_execution(self, mock_device_factory, lease_scope) -> None: hook_config = HookConfigV1Alpha1( before_lease=HookInstanceConfigV1Alpha1(script="echo 'Pre-lease hook executed'", timeout=10), ) - # Mock the Session and serve_unix_async - with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: - mock_session = Mock() - mock_session_class.return_value.__enter__.return_value = mock_session - - # Mock the async context manager for serve_unix_async - mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") - mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) - - # Mock asyncio.create_subprocess_shell to simulate successful execution - mock_process = AsyncMock() - mock_process.returncode = 0 - # Mock stdout.readline to simulate line-by-line output - mock_process.stdout.readline.side_effect = [ - b"Pre-lease hook executed\n", - b"", # EOF - ] - mock_process.wait = AsyncMock(return_value=None) - - with patch("asyncio.create_subprocess_shell", return_value=mock_process) as mock_subprocess: - executor = HookExecutor( - 
config=hook_config, - device_factory=mock_device_factory, - ) - - result = await executor.execute_before_lease_hook(hook_context) - - assert result is None - - # Verify subprocess was called with correct environment - mock_subprocess.assert_called_once() - call_args = mock_subprocess.call_args - command = call_args[0][0] - env = call_args[1]["env"] - - assert command == "echo 'Pre-lease hook executed'" - assert JUMPSTARTER_HOST in env - assert env[JUMPSTARTER_HOST] == "/tmp/test_socket" - assert env[JMP_DRIVERS_ALLOW] == "UNSAFE" - assert env["LEASE_NAME"] == "test-lease-123" - assert env["CLIENT_NAME"] == "test-client" - - async def test_failed_hook_execution(self, mock_device_factory, hook_context) -> None: + # Mock asyncio.create_subprocess_shell to simulate successful execution + mock_process = AsyncMock() + mock_process.returncode = 0 + # Mock stdout.readline to simulate line-by-line output + mock_process.stdout.readline.side_effect = [ + b"Pre-lease hook executed\n", + b"", # EOF + ] + mock_process.wait = AsyncMock(return_value=None) + + with patch("asyncio.create_subprocess_shell", return_value=mock_process) as mock_subprocess: + executor = HookExecutor( + config=hook_config, + device_factory=mock_device_factory, + ) + + result = await executor.execute_before_lease_hook(lease_scope) + + assert result is None + + # Verify subprocess was called with correct environment + mock_subprocess.assert_called_once() + call_args = mock_subprocess.call_args + command = call_args[0][0] + env = call_args[1]["env"] + + assert command == "echo 'Pre-lease hook executed'" + assert JUMPSTARTER_HOST in env + assert env[JUMPSTARTER_HOST] == "/tmp/test_socket" + assert env[JMP_DRIVERS_ALLOW] == "UNSAFE" + assert env["LEASE_NAME"] == "test-lease-123" + assert env["CLIENT_NAME"] == "test-client" + + async def test_failed_hook_execution(self, mock_device_factory, lease_scope) -> None: failed_config = HookConfigV1Alpha1( before_lease=HookInstanceConfigV1Alpha1( script="exit 1", 
timeout=10, on_failure="endLease" ), # Command that will fail with on_failure="endLease" ) - with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: - mock_session = Mock() - mock_session_class.return_value.__enter__.return_value = mock_session - - mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") - mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) - - # Mock failed process - mock_process = AsyncMock() - mock_process.returncode = 1 - # Mock stdout.readline for failed process - mock_process.stdout.readline.side_effect = [ - b"Command failed\n", - b"", # EOF - ] - mock_process.wait = AsyncMock(return_value=None) - - with patch("asyncio.create_subprocess_shell", return_value=mock_process): - executor = HookExecutor( - config=failed_config, - device_factory=mock_device_factory, - ) - - # Should raise HookExecutionError since on_failure="endLease" - with pytest.raises(HookExecutionError, match="Hook failed with exit code 1"): - await executor.execute_before_lease_hook(hook_context) - - async def test_hook_timeout(self, mock_device_factory, hook_context) -> None: + # Mock failed process + mock_process = AsyncMock() + mock_process.returncode = 1 + # Mock stdout.readline for failed process + mock_process.stdout.readline.side_effect = [ + b"Command failed\n", + b"", # EOF + ] + mock_process.wait = AsyncMock(return_value=None) + + with patch("asyncio.create_subprocess_shell", return_value=mock_process): + executor = HookExecutor( + config=failed_config, + device_factory=mock_device_factory, + ) + + # Should raise HookExecutionError since on_failure="endLease" + with pytest.raises(HookExecutionError, match="Hook failed with exit code 1"): + await executor.execute_before_lease_hook(lease_scope) + + async def test_hook_timeout(self, mock_device_factory, lease_scope) -> None: timeout_config = HookConfigV1Alpha1( before_lease=HookInstanceConfigV1Alpha1( script="sleep 60", timeout=1, 
on_failure="exit" ), # Command that will timeout with on_failure="exit" ) - with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: - mock_session = Mock() - mock_session_class.return_value.__enter__.return_value = mock_session - - mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") - mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) + # Mock process that times out + mock_process = AsyncMock() + mock_process.stdout.readline.return_value = b"" # EOF + mock_process.terminate = AsyncMock(return_value=None) + mock_process.wait = AsyncMock(return_value=None) - # Mock process that times out - mock_process = AsyncMock() - mock_process.terminate.return_value = None - mock_process.wait.return_value = None + with ( + patch("asyncio.create_subprocess_shell", return_value=mock_process), + patch("asyncio.wait_for", side_effect=asyncio.TimeoutError()), + ): + executor = HookExecutor( + config=timeout_config, + device_factory=mock_device_factory, + ) - with ( - patch("asyncio.create_subprocess_shell", return_value=mock_process), - patch("asyncio.wait_for", side_effect=asyncio.TimeoutError()), - ): - executor = HookExecutor( - config=timeout_config, - device_factory=mock_device_factory, - ) + # Should raise HookExecutionError since on_failure="exit" + with pytest.raises(HookExecutionError, match="timed out after 1 seconds"): + await executor.execute_before_lease_hook(lease_scope) - # Should raise HookExecutionError since on_failure="exit" - with pytest.raises(HookExecutionError, match="timed out after 1 seconds"): - await executor.execute_before_lease_hook(hook_context) + mock_process.terminate.assert_called_once() - mock_process.terminate.assert_called_once() - - async def test_hook_environment_variables(self, mock_device_factory, hook_context) -> None: + async def test_hook_environment_variables(self, mock_device_factory, lease_scope) -> None: hook_config = HookConfigV1Alpha1( 
before_lease=HookInstanceConfigV1Alpha1(script="echo 'Pre-lease hook executed'", timeout=10), ) - with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: - mock_session = Mock() - mock_session_class.return_value.__enter__.return_value = mock_session - - mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") - mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) - - mock_process = AsyncMock() - mock_process.returncode = 0 - # Mock stdout.readline for environment test - mock_process.stdout.readline.side_effect = [ - b"", # EOF (no output) - ] - mock_process.wait = AsyncMock(return_value=None) - - with patch("asyncio.create_subprocess_shell", return_value=mock_process) as mock_subprocess: - executor = HookExecutor( - config=hook_config, - device_factory=mock_device_factory, - ) - - await executor.execute_before_lease_hook(hook_context) - - # Check that all expected environment variables are set - call_args = mock_subprocess.call_args - env = call_args[1]["env"] - - assert env["LEASE_NAME"] == "test-lease-123" - assert env["CLIENT_NAME"] == "test-client" - assert env["LEASE_DURATION"] == "30m" - assert env["EXPORTER_NAME"] == "test-exporter" - assert env["EXPORTER_NAMESPACE"] == "default" - assert env[JUMPSTARTER_HOST] == "/tmp/test_socket" - assert env[JMP_DRIVERS_ALLOW] == "UNSAFE" - - async def test_real_time_output_logging(self, mock_device_factory, hook_context) -> None: + mock_process = AsyncMock() + mock_process.returncode = 0 + # Mock stdout.readline for environment test + mock_process.stdout.readline.side_effect = [ + b"", # EOF (no output) + ] + mock_process.wait = AsyncMock(return_value=None) + + with patch("asyncio.create_subprocess_shell", return_value=mock_process) as mock_subprocess: + executor = HookExecutor( + config=hook_config, + device_factory=mock_device_factory, + ) + + await executor.execute_before_lease_hook(lease_scope) + + # Check that expected 
environment variables are set (unused fields removed) + call_args = mock_subprocess.call_args + env = call_args[1]["env"] + + assert env["LEASE_NAME"] == "test-lease-123" + assert env["CLIENT_NAME"] == "test-client" + # These fields are no longer set: + assert "LEASE_DURATION" not in env + assert "EXPORTER_NAME" not in env + assert "EXPORTER_NAMESPACE" not in env + assert env[JUMPSTARTER_HOST] == "/tmp/test_socket" + assert env[JMP_DRIVERS_ALLOW] == "UNSAFE" + + async def test_real_time_output_logging(self, mock_device_factory, lease_scope) -> None: """Test that hook output is logged in real-time at INFO level.""" hook_config = HookConfigV1Alpha1( - before_lease=HookInstanceConfigV1Alpha1( - script="echo 'Line 1'; echo 'Line 2'; echo 'Line 3'", timeout=10 - ), + before_lease=HookInstanceConfigV1Alpha1(script="echo 'Line 1'; echo 'Line 2'; echo 'Line 3'", timeout=10), ) - with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: - mock_session = Mock() - mock_session_class.return_value.__enter__.return_value = mock_session - - mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") - mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) - - mock_process = AsyncMock() - mock_process.returncode = 0 - # Mock multiple lines of output to verify streaming - mock_process.stdout.readline.side_effect = [ - b"Line 1\n", - b"Line 2\n", - b"Line 3\n", - b"", # EOF + mock_process = AsyncMock() + mock_process.returncode = 0 + # Mock multiple lines of output to verify streaming + mock_process.stdout.readline.side_effect = [ + b"Line 1\n", + b"Line 2\n", + b"Line 3\n", + b"", # EOF + ] + mock_process.wait = AsyncMock(return_value=None) + + # Mock the logger to capture log calls + with ( + patch("jumpstarter.exporter.hooks.logger") as mock_logger, + patch("asyncio.create_subprocess_shell", return_value=mock_process), + ): + executor = HookExecutor( + config=hook_config, + 
device_factory=mock_device_factory, + ) + + result = await executor.execute_before_lease_hook(lease_scope) + + assert result is None + + # Verify that output lines were logged in real-time at INFO level + expected_calls = [ + call("Executing before-lease hook for lease %s", "test-lease-123"), + call("Executing hook: %s", "echo 'Line 1'; echo 'Line 2'; echo 'Line 3'"), + call("Hook executed successfully"), ] - mock_process.wait = AsyncMock(return_value=None) - - # Mock the logger to capture log calls - with ( - patch("jumpstarter.exporter.hooks.logger") as mock_logger, - patch("asyncio.create_subprocess_shell", return_value=mock_process), - ): - executor = HookExecutor( - config=hook_config, - device_factory=mock_device_factory, - ) - - result = await executor.execute_before_lease_hook(hook_context) - - assert result is None - - # Verify that output lines were logged in real-time at INFO level - expected_calls = [ - call("Executing before-lease hook for lease %s", "test-lease-123"), - call("Executing hook: %s", "echo 'Line 1'; echo 'Line 2'; echo 'Line 3'"), - call("Hook executed successfully"), - ] - mock_logger.info.assert_has_calls(expected_calls, any_order=False) - - async def test_post_lease_hook_execution_on_completion(self, mock_device_factory, hook_context) -> None: + mock_logger.info.assert_has_calls(expected_calls, any_order=False) + + async def test_post_lease_hook_execution_on_completion(self, mock_device_factory, lease_scope) -> None: """Test that post-lease hook executes when called directly.""" hook_config = HookConfigV1Alpha1( after_lease=HookInstanceConfigV1Alpha1(script="echo 'Post-lease cleanup completed'", timeout=10), ) - with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: - mock_session = Mock() - mock_session_class.return_value.__enter__.return_value = mock_session - - mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") - mock_session.serve_unix_async.return_value.__aexit__ = 
AsyncMock(return_value=None) - - mock_process = AsyncMock() - mock_process.returncode = 0 - # Mock post-lease hook output - mock_process.stdout.readline.side_effect = [ - b"Post-lease cleanup completed\n", - b"", # EOF + mock_process = AsyncMock() + mock_process.returncode = 0 + # Mock post-lease hook output + mock_process.stdout.readline.side_effect = [ + b"Post-lease cleanup completed\n", + b"", # EOF + ] + mock_process.wait = AsyncMock(return_value=None) + + # Mock the logger to capture log calls + with ( + patch("jumpstarter.exporter.hooks.logger") as mock_logger, + patch("asyncio.create_subprocess_shell", return_value=mock_process), + ): + executor = HookExecutor( + config=hook_config, + device_factory=mock_device_factory, + ) + + result = await executor.execute_after_lease_hook(lease_scope) + + assert result is None + + # Verify that post-lease hook output was logged + expected_calls = [ + call("Executing after-lease hook for lease %s", "test-lease-123"), + call("Executing hook: %s", "echo 'Post-lease cleanup completed'"), + call("Hook executed successfully"), ] - mock_process.wait = AsyncMock(return_value=None) - - # Mock the logger to capture log calls - with ( - patch("jumpstarter.exporter.hooks.logger") as mock_logger, - patch("asyncio.create_subprocess_shell", return_value=mock_process), - ): - executor = HookExecutor( - config=hook_config, - device_factory=mock_device_factory, - ) - - result = await executor.execute_after_lease_hook(hook_context) - - assert result is None - - # Verify that post-lease hook output was logged - expected_calls = [ - call("Executing after-lease hook for lease %s", "test-lease-123"), - call("Executing hook: %s", "echo 'Post-lease cleanup completed'"), - call("Hook executed successfully"), - ] - mock_logger.info.assert_has_calls(expected_calls, any_order=False) - - async def test_hook_timeout_with_warn(self, mock_device_factory, hook_context) -> None: + mock_logger.info.assert_has_calls(expected_calls, any_order=False) + + 
async def test_hook_timeout_with_warn(self, mock_device_factory, lease_scope) -> None: """Test that hook succeeds when timeout occurs but on_failure='warn'.""" hook_config = HookConfigV1Alpha1( before_lease=HookInstanceConfigV1Alpha1(script="sleep 60", timeout=1, on_failure="warn"), ) - with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: - mock_session = Mock() - mock_session_class.return_value.__enter__.return_value = mock_session - mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") - mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) - - mock_process = AsyncMock() - mock_process.terminate = AsyncMock(return_value=None) - mock_process.wait = AsyncMock(return_value=None) - - with ( - patch("asyncio.create_subprocess_shell", return_value=mock_process), - patch("asyncio.wait_for", side_effect=asyncio.TimeoutError()), - patch("jumpstarter.exporter.hooks.logger") as mock_logger, - ): - executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) - result = await executor.execute_before_lease_hook(hook_context) - assert result is None - # Verify WARNING log was created - assert any("on_failure=warn, continuing" in str(call) for call in mock_logger.warning.call_args_list) + mock_process = AsyncMock() + mock_process.stdout.readline.return_value = b"" # EOF + mock_process.terminate = AsyncMock(return_value=None) + mock_process.wait = AsyncMock(return_value=None) + + with ( + patch("asyncio.create_subprocess_shell", return_value=mock_process), + patch("asyncio.wait_for", side_effect=asyncio.TimeoutError()), + patch("jumpstarter.exporter.hooks.logger") as mock_logger, + ): + executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) + result = await executor.execute_before_lease_hook(lease_scope) + assert result is None + # Verify WARNING log was created + assert any("on_failure=warn, continuing" in str(call) for call in 
mock_logger.warning.call_args_list) diff --git a/python/packages/jumpstarter/jumpstarter/exporter/lease_context.py b/python/packages/jumpstarter/jumpstarter/exporter/lease_context.py new file mode 100644 index 00000000..f9caad72 --- /dev/null +++ b/python/packages/jumpstarter/jumpstarter/exporter/lease_context.py @@ -0,0 +1,63 @@ +"""LeaseScope: Context manager for lease-related resources. + +This module provides a clean abstraction for managing the lifecycle of resources +associated with a lease, including the session, socket path, and synchronization events. +""" + +from dataclasses import dataclass, field + +from anyio import Event + +from jumpstarter.exporter.session import Session + + +@dataclass +class LeaseContext: + """Encapsulates all resources associated with an active lease. + + This class bundles together the session, socket path, synchronization event, + and lease identity information that are needed throughout the lease lifecycle. + By grouping these resources, we make their relationships and lifecycles explicit. + + Attributes: + lease_name: Name of the current lease assigned by the controller + session: The Session object managing the device and gRPC services (set in handle_lease) + socket_path: Unix socket path where the session is serving (set in handle_lease) + before_lease_hook: Event that signals when before-lease hook completes + client_name: Name of the client currently holding the lease (empty if unleased) + """ + + lease_name: str + before_lease_hook: Event + session: Session | None = None + socket_path: str = "" + client_name: str = field(default="") + + def __post_init__(self): + """Validate that required resources are present.""" + assert self.before_lease_hook is not None, "LeaseScope requires a before_lease_hook event" + assert self.lease_name, "LeaseScope requires a non-empty lease_name" + + def is_ready(self) -> bool: + """Check if the lease scope has been fully initialized with session and socket. 
+ + Note: This checks for resource initialization (session/socket), not lease activity. + Use is_active() to check if the lease itself is active. + """ + return self.session is not None and self.socket_path != "" + + def is_active(self) -> bool: + """Check if this lease is active (has a non-empty lease name).""" + return bool(self.lease_name) + + def has_client(self) -> bool: + """Check if a client is currently holding the lease.""" + return bool(self.client_name) + + def update_client(self, client_name: str): + """Update the client name for this lease.""" + self.client_name = client_name + + def clear_client(self): + """Clear the client name when the lease is no longer held.""" + self.client_name = "" diff --git a/python/packages/jumpstarter/jumpstarter/exporter/session.py b/python/packages/jumpstarter/jumpstarter/exporter/session.py index 13d1a462..63d1e9a0 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/session.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/session.py @@ -4,7 +4,7 @@ from contextlib import asynccontextmanager, contextmanager, suppress from dataclasses import dataclass, field from logging.handlers import QueueHandler -from typing import Self +from typing import TYPE_CHECKING, Self from uuid import UUID import grpc @@ -19,11 +19,13 @@ from .logging import LogHandler from jumpstarter.common import ExporterStatus, LogSource, Metadata, TemporarySocket from jumpstarter.common.streams import StreamRequestMetadata -from jumpstarter.driver import Driver from jumpstarter.streams.common import forward_stream from jumpstarter.streams.metadata import MetadataStreamAttributes from jumpstarter.streams.router import RouterStream +if TYPE_CHECKING: + from jumpstarter.driver import Driver + logger = logging.getLogger(__name__) @@ -34,8 +36,8 @@ class Session( Metadata, ContextManagerMixin, ): - root_device: Driver - mapping: dict[UUID, Driver] + root_device: "Driver" + mapping: dict[UUID, "Driver"] _logging_queue: deque = 
field(init=False) _logging_handler: QueueHandler = field(init=False) From 6c632ebcef1095ab9573a2521b6e0b5f2d2c4183 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Tue, 25 Nov 2025 14:34:28 -0500 Subject: [PATCH 16/30] Fix broken tests due to field name change and status not being correct --- .../jumpstarter-testing/jumpstarter_testing/pytest_test.py | 3 +++ python/packages/jumpstarter/jumpstarter/client/core.py | 2 +- python/packages/jumpstarter/jumpstarter/common/utils.py | 4 ++++ python/packages/jumpstarter/jumpstarter/config/exporter.py | 3 +++ python/packages/jumpstarter/jumpstarter/exporter/session.py | 2 +- 5 files changed, 12 insertions(+), 2 deletions(-) diff --git a/python/packages/jumpstarter-testing/jumpstarter_testing/pytest_test.py b/python/packages/jumpstarter-testing/jumpstarter_testing/pytest_test.py index f1697cc2..073bdb89 100644 --- a/python/packages/jumpstarter-testing/jumpstarter_testing/pytest_test.py +++ b/python/packages/jumpstarter-testing/jumpstarter_testing/pytest_test.py @@ -1,6 +1,7 @@ from jumpstarter_driver_power.driver import MockPower from pytest import Pytester +from jumpstarter.common import ExporterStatus from jumpstarter.config.env import JMP_DRIVERS_ALLOW, JUMPSTARTER_HOST from jumpstarter.exporter import Session @@ -18,6 +19,8 @@ def test_simple(self, client): with Session(root_device=MockPower()) as session: with session.serve_unix() as path: + # For local testing, set status to LEASE_READY since there's no lease/hook flow + session.update_status(ExporterStatus.LEASE_READY) monkeypatch.setenv(JUMPSTARTER_HOST, str(path)) monkeypatch.setenv(JMP_DRIVERS_ALLOW, "UNSAFE") result = pytester.runpytest() diff --git a/python/packages/jumpstarter/jumpstarter/client/core.py b/python/packages/jumpstarter/jumpstarter/client/core.py index 2f6491db..2fdffcb3 100644 --- a/python/packages/jumpstarter/jumpstarter/client/core.py +++ b/python/packages/jumpstarter/jumpstarter/client/core.py @@ -89,7 +89,7 @@ async def 
check_exporter_status(self): status = ExporterStatus.from_proto(response.status) if status != ExporterStatus.LEASE_READY: - raise ExporterNotReady(f"Exporter status is {status}: {response.status_message}") + raise ExporterNotReady(f"Exporter status is {status}: {response.message}") except AioRpcError as e: # If GetStatus is not implemented, assume ready for backward compatibility diff --git a/python/packages/jumpstarter/jumpstarter/common/utils.py b/python/packages/jumpstarter/jumpstarter/common/utils.py index 6a0fa8f1..c23a8e46 100644 --- a/python/packages/jumpstarter/jumpstarter/common/utils.py +++ b/python/packages/jumpstarter/jumpstarter/common/utils.py @@ -22,8 +22,12 @@ @asynccontextmanager async def serve_async(root_device: "Driver", portal: BlockingPortal, stack: ExitStack): + from jumpstarter.common import ExporterStatus + with Session(root_device=root_device) as session: async with session.serve_unix_async() as path: + # For local testing, set status to LEASE_READY since there's no lease/hook flow + session.update_status(ExporterStatus.LEASE_READY) # SAFETY: the root_device instance is constructed locally thus considered trusted async with client_from_path(path, portal, stack, allow=[], unsafe=True) as client: try: diff --git a/python/packages/jumpstarter/jumpstarter/config/exporter.py b/python/packages/jumpstarter/jumpstarter/config/exporter.py index b4998caf..4122cd37 100644 --- a/python/packages/jumpstarter/jumpstarter/config/exporter.py +++ b/python/packages/jumpstarter/jumpstarter/config/exporter.py @@ -183,6 +183,7 @@ def delete(cls, alias: str) -> Path: @asynccontextmanager async def serve_unix_async(self): # dynamic import to avoid circular imports + from jumpstarter.common import ExporterStatus from jumpstarter.exporter import Session with Session( @@ -193,6 +194,8 @@ async def serve_unix_async(self): ).instantiate(), ) as session: async with session.serve_unix_async() as path: + # For local usage, set status to LEASE_READY since there's no 
lease/hook flow + session.update_status(ExporterStatus.LEASE_READY) yield path @contextmanager diff --git a/python/packages/jumpstarter/jumpstarter/exporter/session.py b/python/packages/jumpstarter/jumpstarter/exporter/session.py index 63d1e9a0..663c39e0 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/session.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/session.py @@ -174,5 +174,5 @@ async def GetStatus(self, request, context): logger.debug("GetStatus() -> %s", self._current_status) return jumpstarter_pb2.GetStatusResponse( status=self._current_status.to_proto(), - status_message=self._status_message, + message=self._status_message, ) From f690b79dcffafa09c78e1ecae94384e6cca1710b Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Tue, 25 Nov 2025 14:53:21 -0500 Subject: [PATCH 17/30] Fix typing issues --- .../jumpstarter/jumpstarter/client/core.py | 2 ++ .../jumpstarter/jumpstarter/client/grpc.py | 3 +-- .../jumpstarter/common/__init__.py | 12 ++++++++- .../jumpstarter/jumpstarter/common/types.py | 25 +++++++++++++++++++ .../jumpstarter/jumpstarter/config/client.py | 2 +- .../jumpstarter/config/exporter.py | 2 +- 6 files changed, 41 insertions(+), 5 deletions(-) create mode 100644 python/packages/jumpstarter/jumpstarter/common/types.py diff --git a/python/packages/jumpstarter/jumpstarter/client/core.py b/python/packages/jumpstarter/jumpstarter/client/core.py index 2fdffcb3..4f859f23 100644 --- a/python/packages/jumpstarter/jumpstarter/client/core.py +++ b/python/packages/jumpstarter/jumpstarter/client/core.py @@ -2,6 +2,8 @@ Base classes for drivers and driver clients """ +from __future__ import annotations + import logging from contextlib import asynccontextmanager from dataclasses import dataclass, field diff --git a/python/packages/jumpstarter/jumpstarter/client/grpc.py b/python/packages/jumpstarter/jumpstarter/client/grpc.py index 0bfd06e9..a8c352d9 100644 --- a/python/packages/jumpstarter/jumpstarter/client/grpc.py +++ 
b/python/packages/jumpstarter/jumpstarter/client/grpc.py @@ -5,7 +5,6 @@ from dataclasses import InitVar, dataclass, field from datetime import datetime, timedelta from types import SimpleNamespace -from typing import Any from google.protobuf import duration_pb2, field_mask_pb2, json_format, timestamp_pb2 from grpc import ChannelConnectivity @@ -489,7 +488,7 @@ class MultipathExporterStub: channels: InitVar[list[Channel]] - __stubs: dict[Channel, Any] = field(init=False, default_factory=OrderedDict) + __stubs: dict[Channel, SimpleNamespace] = field(init=False, default_factory=OrderedDict) def __post_init__(self, channels): for channel in channels: diff --git a/python/packages/jumpstarter/jumpstarter/common/__init__.py b/python/packages/jumpstarter/jumpstarter/common/__init__.py index 08645b47..8d6ba38b 100644 --- a/python/packages/jumpstarter/jumpstarter/common/__init__.py +++ b/python/packages/jumpstarter/jumpstarter/common/__init__.py @@ -1,12 +1,22 @@ from .enums import ExporterStatus, LogSource from .metadata import Metadata from .tempfile import TemporarySocket, TemporaryTcpListener, TemporaryUnixListener +from .types import ( + AsyncChannel, + ControllerStub, + ExporterStub, + RouterStub, +) __all__ = [ + "AsyncChannel", + "ControllerStub", "ExporterStatus", + "ExporterStub", "LogSource", "Metadata", + "RouterStub", "TemporarySocket", - "TemporaryUnixListener", "TemporaryTcpListener", + "TemporaryUnixListener", ] diff --git a/python/packages/jumpstarter/jumpstarter/common/types.py b/python/packages/jumpstarter/jumpstarter/common/types.py new file mode 100644 index 00000000..fb110492 --- /dev/null +++ b/python/packages/jumpstarter/jumpstarter/common/types.py @@ -0,0 +1,25 @@ +"""Type aliases for gRPC and Protobuf types.""" + +from typing import TYPE_CHECKING, TypeAlias + +from grpc.aio import Channel +from jumpstarter_protocol import jumpstarter_pb2_grpc, router_pb2_grpc + +# Stub type aliases (the generic Stub classes work for both sync and async) 
+ExporterStub: TypeAlias = jumpstarter_pb2_grpc.ExporterServiceStub +RouterStub: TypeAlias = router_pb2_grpc.RouterServiceStub +ControllerStub: TypeAlias = jumpstarter_pb2_grpc.ControllerServiceStub + +# Channel type alias +AsyncChannel: TypeAlias = Channel + +# Async stub type aliases are only available for type checking (defined in .pyi files) +if TYPE_CHECKING: + pass + +__all__ = [ + "AsyncChannel", + "ControllerStub", + "ExporterStub", + "RouterStub", +] diff --git a/python/packages/jumpstarter/jumpstarter/config/client.py b/python/packages/jumpstarter/jumpstarter/config/client.py index c6fc2d91..c2f7ac19 100644 --- a/python/packages/jumpstarter/jumpstarter/config/client.py +++ b/python/packages/jumpstarter/jumpstarter/config/client.py @@ -120,7 +120,7 @@ class ClientConfigV1Alpha1(BaseSettings): leases: ClientConfigV1Alpha1Lease = Field(default_factory=ClientConfigV1Alpha1Lease) - async def channel(self): + async def channel(self) -> grpc.aio.Channel: if self.endpoint is None or self.token is None: raise ConfigurationError("endpoint or token not set in client config") diff --git a/python/packages/jumpstarter/jumpstarter/config/exporter.py b/python/packages/jumpstarter/jumpstarter/config/exporter.py index 4122cd37..e70b00d5 100644 --- a/python/packages/jumpstarter/jumpstarter/config/exporter.py +++ b/python/packages/jumpstarter/jumpstarter/config/exporter.py @@ -212,7 +212,7 @@ async def create_exporter(self): from jumpstarter.exporter import Exporter - async def channel_factory(): + async def channel_factory() -> grpc.aio.Channel: if self.endpoint is None or self.token is None: raise ConfigurationError("endpoint or token not set in exporter config") credentials = grpc.composite_channel_credentials( From 321441ce2a44e34d1612d1289a089d01d44f325c Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Tue, 25 Nov 2025 22:18:01 -0500 Subject: [PATCH 18/30] Fix controller registration issue --- .../jumpstarter/jumpstarter/exporter/exporter.py | 16 +++++++++++----- 1 file 
changed, 11 insertions(+), 5 deletions(-) diff --git a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py index 1efc2dff..e80ff186 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py @@ -238,12 +238,19 @@ def factory( return factory - async def _register_with_controller(self, channel: grpc.aio.Channel): - """Register the exporter with the controller.""" - exporter_stub = jumpstarter_pb2_grpc.ExporterServiceStub(channel) + async def _register_with_controller(self, local_channel: grpc.aio.Channel): + """Register the exporter with the controller. + + Args: + local_channel: The local Unix socket channel to get device reports from + """ + # Get device reports from the local session + exporter_stub = jumpstarter_pb2_grpc.ExporterServiceStub(local_channel) response: jumpstarter_pb2.GetReportResponse = await exporter_stub.GetReport(empty_pb2.Empty()) + + # Register with the REMOTE controller (not the local session) logger.info("Registering exporter with controller") - controller = jumpstarter_pb2_grpc.ControllerServiceStub(channel) + controller = await self._get_controller_stub() await controller.Register( jumpstarter_pb2.RegisterRequest( labels=self.labels, @@ -253,7 +260,6 @@ async def _register_with_controller(self, channel: grpc.aio.Channel): # Mark exporter as registered internally self._registered = True # Report that exporter is available to the controller - # TODO: Determine if the controller should handle this logic internally await self._report_status(ExporterStatus.AVAILABLE, "Exporter registered and available") async def _report_status(self, status: ExporterStatus, message: str = ""): From 763d1a7bd07d0c1ed4ac946a3d3eb78262e1ef64 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Tue, 25 Nov 2025 22:50:59 -0500 Subject: [PATCH 19/30] Add status field to jmp admin get exporter --- 
.../jumpstarter_cli_admin/get_test.py | 50 ++++++++++++++++--- .../jumpstarter_kubernetes/exporters.py | 9 ++++ 2 files changed, 52 insertions(+), 7 deletions(-) diff --git a/python/packages/jumpstarter-cli-admin/jumpstarter_cli_admin/get_test.py b/python/packages/jumpstarter-cli-admin/jumpstarter_cli_admin/get_test.py index 4b12a807..a79ff7d0 100644 --- a/python/packages/jumpstarter-cli-admin/jumpstarter_cli_admin/get_test.py +++ b/python/packages/jumpstarter-cli-admin/jumpstarter_cli_admin/get_test.py @@ -301,7 +301,10 @@ def test_get_clients(_load_kube_config_mock, list_clients_mock: AsyncMock): kind="Exporter", metadata=V1ObjectMeta(name="test", namespace="testing", creation_timestamp="2024-01-01T21:00:00Z"), status=V1Alpha1ExporterStatus( - endpoint="grpc://example.com:443", credential=V1ObjectReference(name="test-credential"), devices=[] + endpoint="grpc://example.com:443", + credential=V1ObjectReference(name="test-credential"), + devices=[], + exporter_status="Available", ), ) @@ -318,7 +321,9 @@ def test_get_clients(_load_kube_config_mock, list_clients_mock: AsyncMock): "name": "test-credential" }, "devices": [], - "endpoint": "grpc://example.com:443" + "endpoint": "grpc://example.com:443", + "exporterStatus": "Available", + "statusMessage": null } } """ @@ -334,6 +339,8 @@ def test_get_clients(_load_kube_config_mock, list_clients_mock: AsyncMock): name: test-credential devices: [] endpoint: grpc://example.com:443 + exporterStatus: Available + statusMessage: null """ @@ -348,6 +355,7 @@ def test_get_exporter(_load_kube_config_mock, get_exporter_mock: AsyncMock): result = runner.invoke(get, ["exporter", "test"]) assert result.exit_code == 0 assert "test" in result.output + assert "Available" in result.output assert "grpc://example.com:443" in result.output get_exporter_mock.reset_mock() @@ -396,6 +404,7 @@ def test_get_exporter(_load_kube_config_mock, get_exporter_mock: AsyncMock): V1Alpha1ExporterDevice(labels={"hardware": "rpi4"}, 
uuid="82a8ac0d-d7ff-4009-8948-18a3c5c607b1"), V1Alpha1ExporterDevice(labels={"hardware": "rpi4"}, uuid="f7cd30ac-64a3-42c6-ba31-b25f033b97c1"), ], + exporter_status="Available", ), ) @@ -425,7 +434,9 @@ def test_get_exporter(_load_kube_config_mock, get_exporter_mock: AsyncMock): "uuid": "f7cd30ac-64a3-42c6-ba31-b25f033b97c1" } ], - "endpoint": "grpc://example.com:443" + "endpoint": "grpc://example.com:443", + "exporterStatus": "Available", + "statusMessage": null } } """ @@ -447,6 +458,8 @@ def test_get_exporter(_load_kube_config_mock, get_exporter_mock: AsyncMock): hardware: rpi4 uuid: f7cd30ac-64a3-42c6-ba31-b25f033b97c1 endpoint: grpc://example.com:443 + exporterStatus: Available + statusMessage: null """ @@ -460,6 +473,7 @@ def test_get_exporter_devices(_load_kube_config_mock, get_exporter_mock: AsyncMo result = runner.invoke(get, ["exporter", "test", "--devices"]) assert result.exit_code == 0 assert "test" in result.output + assert "Available" in result.output assert "grpc://example.com:443" in result.output assert "hardware:rpi4" in result.output assert "82a8ac0d-d7ff-4009-8948-18a3c5c607b1" in result.output @@ -510,6 +524,7 @@ def test_get_exporter_devices(_load_kube_config_mock, get_exporter_mock: AsyncMo endpoint="grpc://example.com:443", credential=V1ObjectReference(name="test-credential"), devices=[], + exporter_status="Available", ), ), V1Alpha1Exporter( @@ -520,6 +535,7 @@ def test_get_exporter_devices(_load_kube_config_mock, get_exporter_mock: AsyncMo endpoint="grpc://example.com:443", credential=V1ObjectReference(name="another-credential"), devices=[], + exporter_status="Available", ), ), ] @@ -541,7 +557,9 @@ def test_get_exporter_devices(_load_kube_config_mock, get_exporter_mock: AsyncMo "name": "test-credential" }, "devices": [], - "endpoint": "grpc://example.com:443" + "endpoint": "grpc://example.com:443", + "exporterStatus": "Available", + "statusMessage": null } }, { @@ -557,7 +575,9 @@ def test_get_exporter_devices(_load_kube_config_mock, 
get_exporter_mock: AsyncMo "name": "another-credential" }, "devices": [], - "endpoint": "grpc://example.com:443" + "endpoint": "grpc://example.com:443", + "exporterStatus": "Available", + "statusMessage": null } } ], @@ -578,6 +598,8 @@ def test_get_exporter_devices(_load_kube_config_mock, get_exporter_mock: AsyncMo name: test-credential devices: [] endpoint: grpc://example.com:443 + exporterStatus: Available + statusMessage: null - apiVersion: jumpstarter.dev/v1alpha1 kind: Exporter metadata: @@ -589,6 +611,8 @@ def test_get_exporter_devices(_load_kube_config_mock, get_exporter_mock: AsyncMo name: another-credential devices: [] endpoint: grpc://example.com:443 + exporterStatus: Available + statusMessage: null kind: ExporterList """ @@ -609,6 +633,7 @@ def test_get_exporters(_load_kube_config_mock, list_exporters_mock: AsyncMock): assert result.exit_code == 0 assert "test" in result.output assert "another" in result.output + assert "Available" in result.output list_exporters_mock.reset_mock() # List exporters JSON output @@ -655,6 +680,7 @@ def test_get_exporters(_load_kube_config_mock, list_exporters_mock: AsyncMock): devices=[ V1Alpha1ExporterDevice(labels={"hardware": "rpi4"}, uuid="82a8ac0d-d7ff-4009-8948-18a3c5c607b1") ], + exporter_status="Available", ), ), V1Alpha1Exporter( @@ -667,6 +693,7 @@ def test_get_exporters(_load_kube_config_mock, list_exporters_mock: AsyncMock): devices=[ V1Alpha1ExporterDevice(labels={"hardware": "rpi4"}, uuid="f7cd30ac-64a3-42c6-ba31-b25f033b97c1"), ], + exporter_status="Available", ), ), ] @@ -695,7 +722,9 @@ def test_get_exporters(_load_kube_config_mock, list_exporters_mock: AsyncMock): "uuid": "82a8ac0d-d7ff-4009-8948-18a3c5c607b1" } ], - "endpoint": "grpc://example.com:443" + "endpoint": "grpc://example.com:443", + "exporterStatus": "Available", + "statusMessage": null } }, { @@ -718,7 +747,9 @@ def test_get_exporters(_load_kube_config_mock, list_exporters_mock: AsyncMock): "uuid": "f7cd30ac-64a3-42c6-ba31-b25f033b97c1" } ], 
- "endpoint": "grpc://example.com:443" + "endpoint": "grpc://example.com:443", + "exporterStatus": "Available", + "statusMessage": null } } ], @@ -742,6 +773,8 @@ def test_get_exporters(_load_kube_config_mock, list_exporters_mock: AsyncMock): hardware: rpi4 uuid: 82a8ac0d-d7ff-4009-8948-18a3c5c607b1 endpoint: grpc://example.com:443 + exporterStatus: Available + statusMessage: null - apiVersion: jumpstarter.dev/v1alpha1 kind: Exporter metadata: @@ -756,6 +789,8 @@ def test_get_exporters(_load_kube_config_mock, list_exporters_mock: AsyncMock): hardware: rpi4 uuid: f7cd30ac-64a3-42c6-ba31-b25f033b97c1 endpoint: grpc://example.com:443 + exporterStatus: Available + statusMessage: null kind: ExporterList """ @@ -774,6 +809,7 @@ def test_get_exporters_devices(_load_kube_config_mock, list_exporters_mock: Asyn assert result.exit_code == 0 assert "test" in result.output assert "another" in result.output + assert "Available" in result.output assert "hardware:rpi4" in result.output assert "82a8ac0d-d7ff-4009-8948-18a3c5c607b1" in result.output assert "f7cd30ac-64a3-42c6-ba31-b25f033b97c1" in result.output diff --git a/python/packages/jumpstarter-kubernetes/jumpstarter_kubernetes/exporters.py b/python/packages/jumpstarter-kubernetes/jumpstarter_kubernetes/exporters.py index 004c47ff..1ea45006 100644 --- a/python/packages/jumpstarter-kubernetes/jumpstarter_kubernetes/exporters.py +++ b/python/packages/jumpstarter-kubernetes/jumpstarter_kubernetes/exporters.py @@ -26,6 +26,8 @@ class V1Alpha1ExporterStatus(JsonBaseModel): credential: SerializeV1ObjectReference devices: list[V1Alpha1ExporterDevice] endpoint: str + exporter_status: str | None = Field(alias="exporterStatus", default=None) + status_message: str | None = Field(alias="statusMessage", default=None) class V1Alpha1Exporter(JsonBaseModel): @@ -55,6 +57,8 @@ def from_dict(dict: dict): devices=[V1Alpha1ExporterDevice(labels=d["labels"], uuid=d["uuid"]) for d in dict["status"]["devices"]] if "devices" in dict["status"] else 
[], + exporter_status=dict["status"].get("exporterStatus"), + status_message=dict["status"].get("statusMessage"), ), ) @@ -62,17 +66,20 @@ def from_dict(dict: dict): def rich_add_columns(cls, table, devices: bool = False): if devices: table.add_column("NAME", no_wrap=True) + table.add_column("STATUS") table.add_column("ENDPOINT") table.add_column("AGE") table.add_column("LABELS") table.add_column("UUID") else: table.add_column("NAME", no_wrap=True) + table.add_column("STATUS") table.add_column("ENDPOINT") table.add_column("DEVICES") table.add_column("AGE") def rich_add_rows(self, table, devices: bool = False): + status = self.status.exporter_status if self.status else "Unknown" if devices: if self.status is not None: for d in self.status.devices: @@ -82,6 +89,7 @@ def rich_add_rows(self, table, devices: bool = False): labels.append(f"{label}:{str(d.labels[label])}") table.add_row( self.metadata.name, + status or "Unknown", self.status.endpoint, time_since(self.metadata.creation_timestamp), ",".join(labels), @@ -91,6 +99,7 @@ def rich_add_rows(self, table, devices: bool = False): else: table.add_row( self.metadata.name, + status or "Unknown", self.status.endpoint, str(len(self.status.devices) if self.status and self.status.devices else 0), time_since(self.metadata.creation_timestamp), From 5ea9475400ca70310617e6a2d7b0c260816b003b Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Tue, 25 Nov 2025 23:01:37 -0500 Subject: [PATCH 20/30] Fix lease status race condition causing E2E tests to fail --- .../jumpstarter/exporter/exporter.py | 50 +++++++++++-------- .../jumpstarter/exporter/lease_context.py | 25 +++++++++- 2 files changed, 53 insertions(+), 22 deletions(-) diff --git a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py index e80ff186..948de420 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py @@ 
-132,7 +132,7 @@ class Exporter(AsyncContextManagerMixin, Metadata): AFTER_LEASE_HOOK, BEFORE_LEASE_HOOK_FAILED, AFTER_LEASE_HOOK_FAILED. """ - _lease_scope: LeaseContext | None = field(init=False, default=None) + _lease_context: LeaseContext | None = field(init=False, default=None) """Encapsulates all resources associated with the current lease. Contains the session, socket path, and synchronization event needed @@ -266,9 +266,10 @@ async def _report_status(self, status: ExporterStatus, message: str = ""): """Report the exporter status with the controller and session.""" self._exporter_status = status - # Update session status if available - if self._lease_scope and self._lease_scope.session: - self._lease_scope.session.update_status(status, message) + # Update status in lease context (handles session update internally) + # This ensures status is stored even before session is created + if self._lease_context: + self._lease_context.update_status(status, message) try: controller = await self._get_controller_stub() @@ -409,6 +410,10 @@ async def handle_lease(self, lease_name: str, tg: TaskGroup, lease_scope: LeaseC # Populate the lease scope with session and socket path lease_scope.session = session lease_scope.socket_path = path + # Sync current status to the newly created session + # This ensures the session has the correct status even if _report_status + # was called before the session was created (race condition fix) + session.update_status(lease_scope.current_status, lease_scope.status_message) # Wait for before-lease hook to complete before processing client connections logger.info("Waiting for before-lease hook to complete before accepting connections") @@ -449,23 +454,23 @@ async def serve(self): # noqa: C901 async for status in status_rx: # Check if lease name changed (and there was a previous active lease) lease_changed = ( - self._lease_scope - and self._lease_scope.is_active() - and self._lease_scope.lease_name != status.lease_name + self._lease_context + 
and self._lease_context.is_active() + and self._lease_context.lease_name != status.lease_name ) if lease_changed: # After-lease hook for the previous lease (lease name changed) - if self.hook_executor and self._lease_scope.has_client(): + if self.hook_executor and self._lease_context.has_client(): with CancelScope(shield=True): await self.hook_executor.run_after_lease_hook( - self._lease_scope, + self._lease_context, self._report_status, self.stop, ) logger.info("Lease status changed, killing existing connections") # Clear lease scope for next lease - self._lease_scope = None + self._lease_context = None self.stop() break @@ -482,43 +487,48 @@ async def serve(self): # noqa: C901 lease_name=status.lease_name, before_lease_hook=Event(), ) - self._lease_scope = lease_scope + self._lease_context = lease_scope tg.start_soon(self.handle_lease, status.lease_name, tg, lease_scope) if current_leased: logger.info("Currently leased by %s under %s", status.client_name, status.lease_name) - if self._lease_scope: - self._lease_scope.update_client(status.client_name) + if self._lease_context: + self._lease_context.update_client(status.client_name) # Before-lease hook when transitioning from unleased to leased if not previous_leased: - if self.hook_executor and self._lease_scope: + if self.hook_executor and self._lease_context: tg.start_soon( self.hook_executor.run_before_lease_hook, - self._lease_scope, + self._lease_context, self._report_status, self.stop, # Pass shutdown callback ) else: # No hook configured, set event immediately await self._report_status(ExporterStatus.LEASE_READY, "Ready for commands") - if self._lease_scope: - self._lease_scope.before_lease_hook.set() + if self._lease_context: + self._lease_context.before_lease_hook.set() else: logger.info("Currently not leased") # After-lease hook when transitioning from leased to unleased - if previous_leased and self.hook_executor and self._lease_scope and self._lease_scope.has_client(): + if ( + previous_leased + and 
self.hook_executor + and self._lease_context + and self._lease_context.has_client() + ): # Shield the after-lease hook from cancellation with CancelScope(shield=True): await self.hook_executor.run_after_lease_hook( - self._lease_scope, + self._lease_context, self._report_status, self.stop, ) # Clear lease scope for next lease - self._lease_scope = None + self._lease_context = None if self._stop_requested: self.stop(should_unregister=True) diff --git a/python/packages/jumpstarter/jumpstarter/exporter/lease_context.py b/python/packages/jumpstarter/jumpstarter/exporter/lease_context.py index f9caad72..9e887839 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/lease_context.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/lease_context.py @@ -5,10 +5,14 @@ """ from dataclasses import dataclass, field +from typing import TYPE_CHECKING from anyio import Event -from jumpstarter.exporter.session import Session +from jumpstarter.common import ExporterStatus + +if TYPE_CHECKING: + from jumpstarter.exporter.session import Session @dataclass @@ -25,13 +29,17 @@ class LeaseContext: socket_path: Unix socket path where the session is serving (set in handle_lease) before_lease_hook: Event that signals when before-lease hook completes client_name: Name of the client currently holding the lease (empty if unleased) + current_status: Current exporter status (stored here for access before session is created) + status_message: Message describing the current status """ lease_name: str before_lease_hook: Event - session: Session | None = None + session: "Session | None" = None socket_path: str = "" client_name: str = field(default="") + current_status: ExporterStatus = field(default=ExporterStatus.AVAILABLE) + status_message: str = field(default="") def __post_init__(self): """Validate that required resources are present.""" @@ -61,3 +69,16 @@ def update_client(self, client_name: str): def clear_client(self): """Clear the client name when the lease is no longer 
held.""" self.client_name = "" + + def update_status(self, status: ExporterStatus, message: str = ""): + """Update the current status in the lease context. + + This stores the status in the LeaseContext so it's available even before + the session is created, fixing the race condition where GetStatus is called + before the session can be updated. + """ + self.current_status = status + self.status_message = message + # Also update session if it exists + if self.session: + self.session.update_status(status, message) From 627a0b363ee4dd5818e37e234c747a86361ab3a6 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Tue, 25 Nov 2025 23:25:21 -0500 Subject: [PATCH 21/30] Fix additional status update race conditions breaking E2E --- .../jumpstarter/jumpstarter/exporter/exporter.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py index 948de420..716fe635 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py @@ -259,8 +259,11 @@ async def _register_with_controller(self, local_channel: grpc.aio.Channel): ) # Mark exporter as registered internally self._registered = True - # Report that exporter is available to the controller - await self._report_status(ExporterStatus.AVAILABLE, "Exporter registered and available") + # Only report AVAILABLE status during initial registration (no lease context) + # During per-lease registration, status is managed by serve() to avoid + # overwriting LEASE_READY with AVAILABLE + if self._lease_context is None: + await self._report_status(ExporterStatus.AVAILABLE, "Exporter registered and available") async def _report_status(self, status: ExporterStatus, message: str = ""): """Report the exporter status with the controller and session.""" @@ -410,16 +413,16 @@ async def handle_lease(self, lease_name: str, tg: 
TaskGroup, lease_scope: LeaseC # Populate the lease scope with session and socket path lease_scope.session = session lease_scope.socket_path = path - # Sync current status to the newly created session - # This ensures the session has the correct status even if _report_status - # was called before the session was created (race condition fix) - session.update_status(lease_scope.current_status, lease_scope.status_message) # Wait for before-lease hook to complete before processing client connections logger.info("Waiting for before-lease hook to complete before accepting connections") await lease_scope.before_lease_hook.wait() logger.info("Before-lease hook completed, now accepting connections") + # Sync status to session AFTER hook completes - this ensures we have LEASE_READY + # status from serve() rather than the default AVAILABLE + session.update_status(lease_scope.current_status, lease_scope.status_message) + # Process client connections # Type: request is jumpstarter_pb2.ListenResponse with router_endpoint and router_token fields async for request in listen_rx: From df07c5f63665bd5610d475377ffd4f4393d5077e Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Tue, 25 Nov 2025 23:49:13 -0500 Subject: [PATCH 22/30] Fix unit test failures --- .../jumpstarter_cli_admin/create_test.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/python/packages/jumpstarter-cli-admin/jumpstarter_cli_admin/create_test.py b/python/packages/jumpstarter-cli-admin/jumpstarter_cli_admin/create_test.py index 7331c3b5..5d4c2d9c 100644 --- a/python/packages/jumpstarter-cli-admin/jumpstarter_cli_admin/create_test.py +++ b/python/packages/jumpstarter-cli-admin/jumpstarter_cli_admin/create_test.py @@ -234,7 +234,9 @@ def test_create_client( "name": "{name}-credential" }}, "devices": [], - "endpoint": "{endpoint}" + "endpoint": "{endpoint}", + "exporterStatus": null, + "statusMessage": null }} }} """.format(name=EXPORTER_NAME, endpoint=EXPORTER_ENDPOINT) @@ -250,6 +252,8 @@ def 
test_create_client( name: {name}-credential devices: [] endpoint: {endpoint} + exporterStatus: null + statusMessage: null """.format(name=EXPORTER_NAME, endpoint=EXPORTER_ENDPOINT) From 77a9b00bbdb350bc38be4270d276672d51e2ad3f Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Wed, 26 Nov 2025 14:07:20 -0500 Subject: [PATCH 23/30] Fix broken unit tests --- .../jumpstarter_kubernetes/exporters_test.py | 32 ++++++++++++------- 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/python/packages/jumpstarter-kubernetes/jumpstarter_kubernetes/exporters_test.py b/python/packages/jumpstarter-kubernetes/jumpstarter_kubernetes/exporters_test.py index 1792a0f3..683f2354 100644 --- a/python/packages/jumpstarter-kubernetes/jumpstarter_kubernetes/exporters_test.py +++ b/python/packages/jumpstarter-kubernetes/jumpstarter_kubernetes/exporters_test.py @@ -47,7 +47,9 @@ def test_exporter_dump_json(): "uuid": "f4cf49ab-fc64-46c6-94e7-a40502eb77b1" } ], - "endpoint": "https://test-exporter" + "endpoint": "https://test-exporter", + "exporterStatus": null, + "statusMessage": null } }""" ) @@ -73,6 +75,8 @@ def test_exporter_dump_yaml(): test: label uuid: f4cf49ab-fc64-46c6-94e7-a40502eb77b1 endpoint: https://test-exporter + exporterStatus: null + statusMessage: null """ ) @@ -113,8 +117,9 @@ def test_exporter_rich_add_columns_without_devices(): mock_table = MagicMock() V1Alpha1Exporter.rich_add_columns(mock_table, devices=False) - assert mock_table.add_column.call_count == 4 + assert mock_table.add_column.call_count == 5 mock_table.add_column.assert_any_call("NAME", no_wrap=True) + mock_table.add_column.assert_any_call("STATUS") mock_table.add_column.assert_any_call("ENDPOINT") mock_table.add_column.assert_any_call("DEVICES") mock_table.add_column.assert_any_call("AGE") @@ -128,8 +133,9 @@ def test_exporter_rich_add_columns_with_devices(): mock_table = MagicMock() V1Alpha1Exporter.rich_add_columns(mock_table, devices=True) - assert mock_table.add_column.call_count == 5 + assert 
mock_table.add_column.call_count == 6 mock_table.add_column.assert_any_call("NAME", no_wrap=True) + mock_table.add_column.assert_any_call("STATUS") mock_table.add_column.assert_any_call("ENDPOINT") mock_table.add_column.assert_any_call("AGE") mock_table.add_column.assert_any_call("LABELS") @@ -146,9 +152,10 @@ def test_exporter_rich_add_rows_without_devices(): mock_table.add_row.assert_called_once() args = mock_table.add_row.call_args[0] assert args[0] == "test-exporter" - assert args[1] == "https://test-exporter" - assert args[2] == "1" # Number of devices - assert args[3] == "5m" # Age + assert args[1] == "Unknown" # Status (shows "Unknown" when exporter_status is None) + assert args[2] == "https://test-exporter" + assert args[3] == "1" # Number of devices + assert args[4] == "5m" # Age def test_exporter_rich_add_rows_with_devices(): @@ -161,10 +168,11 @@ def test_exporter_rich_add_rows_with_devices(): mock_table.add_row.assert_called_once() args = mock_table.add_row.call_args[0] assert args[0] == "test-exporter" - assert args[1] == "https://test-exporter" - assert args[2] == "5m" # Age - assert args[3] == "test:label" # Labels - assert args[4] == "f4cf49ab-fc64-46c6-94e7-a40502eb77b1" # UUID + assert args[1] == "Unknown" # Status (shows "Unknown" when exporter_status is None) + assert args[2] == "https://test-exporter" + assert args[3] == "5m" # Age + assert args[4] == "test:label" # Labels + assert args[5] == "f4cf49ab-fc64-46c6-94e7-a40502eb77b1" # UUID def test_exporter_rich_add_names(): @@ -212,7 +220,7 @@ def test_exporter_list_rich_add_columns(): mock_table = MagicMock() V1Alpha1ExporterList.rich_add_columns(mock_table, devices=False) - assert mock_table.add_column.call_count == 4 + assert mock_table.add_column.call_count == 5 def test_exporter_list_rich_add_columns_with_devices(): @@ -223,7 +231,7 @@ def test_exporter_list_rich_add_columns_with_devices(): mock_table = MagicMock() V1Alpha1ExporterList.rich_add_columns(mock_table, devices=True) - assert 
mock_table.add_column.call_count == 5 + assert mock_table.add_column.call_count == 6 def test_exporter_list_rich_add_rows(): From 1ac0870cd588b11548ebd2cfe2cf7d471e6c68c4 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Thu, 27 Nov 2025 08:29:09 -0500 Subject: [PATCH 24/30] Fix CodeRabbit warnings for previous_leased --- .../jumpstarter/jumpstarter/exporter/exporter.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py index 716fe635..ed11e6f2 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py @@ -132,6 +132,13 @@ class Exporter(AsyncContextManagerMixin, Metadata): AFTER_LEASE_HOOK, BEFORE_LEASE_HOOK_FAILED, AFTER_LEASE_HOOK_FAILED. """ + _previous_leased: bool = field(init=False, default=False) + """Previous lease state used to detect lease state transitions. + + Tracks whether the exporter was leased in the previous status check to + determine when to trigger before-lease and after-lease hooks. + """ + _lease_context: LeaseContext | None = field(init=False, default=None) """Encapsulates all resources associated with the current lease. 
@@ -478,7 +485,7 @@ async def serve(self): # noqa: C901 break # Check for lease state transitions - previous_leased = hasattr(self, "_previous_leased") and self._previous_leased + previous_leased = self._previous_leased current_leased = status.leased # Check if this is a new lease assignment (first time or lease name changed) From 395a2922923389e1176017dac58078e69c87de66 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Sun, 4 Jan 2026 18:22:20 -0500 Subject: [PATCH 25/30] Fix hooks race condition --- .../jumpstarter/jumpstarter/exporter/hooks.py | 128 +++++++++++------- 1 file changed, 79 insertions(+), 49 deletions(-) diff --git a/python/packages/jumpstarter/jumpstarter/exporter/hooks.py b/python/packages/jumpstarter/jumpstarter/exporter/hooks.py index 3a76fda7..1f37ace2 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/hooks.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/hooks.py @@ -1,16 +1,18 @@ """Lifecycle hooks for Jumpstarter exporters.""" -import asyncio import logging import os +import subprocess from collections.abc import Awaitable from dataclasses import dataclass from typing import TYPE_CHECKING, Callable, Literal +import anyio +from anyio import open_process + from jumpstarter.common import ExporterStatus, LogSource from jumpstarter.config.env import JMP_DRIVERS_ALLOW, JUMPSTARTER_HOST from jumpstarter.config.exporter import HookConfigV1Alpha1, HookInstanceConfigV1Alpha1 -from jumpstarter.exporter.logging import get_logger from jumpstarter.exporter.session import Session if TYPE_CHECKING: @@ -148,7 +150,12 @@ async def _execute_hook_process( logging_session: Session, hook_type: Literal["before_lease", "after_lease"], ) -> None: - """Execute the hook process with the given environment and logging session.""" + """Execute the hook process with the given environment and logging session. + + Uses anyio for subprocess execution to be compatible with the anyio-based exporter. 
+ """ + + command = hook_config.script timeout = hook_config.timeout on_failure = hook_config.on_failure @@ -156,56 +163,60 @@ async def _execute_hook_process( # Exception handling error_msg: str | None = None cause: Exception | None = None + timed_out = False try: - # Execute the hook command using shell - process = await asyncio.create_subprocess_shell( + # Execute the hook command using shell via anyio + # Pass the command as a string to use shell mode + async with await open_process( command, env=hook_env, - stdout=asyncio.subprocess.PIPE, - stderr=asyncio.subprocess.STDOUT, - ) - - try: - # Create a logger with automatic source registration - hook_logger = get_logger(f"hook.{lease_scope.lease_name}", log_source, logging_session) - - # Stream output line-by-line for real-time logging - output_lines = [] - - async def read_output(): - while True: - line = await process.stdout.readline() - if not line: - break - line_decoded = line.decode().rstrip() - output_lines.append(line_decoded) - # Route hook output through the logging system - hook_logger.info(line_decoded) - - # Run output reading and process waiting concurrently with timeout - await asyncio.wait_for(asyncio.gather(read_output(), process.wait()), timeout=timeout) + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + ) as process: + output_lines: list[str] = [] + + async def read_output() -> None: + """Read stdout line by line.""" + assert process.stdout is not None + buffer = b"" + async for chunk in process.stdout: + buffer += chunk + while b"\n" in buffer: + line, buffer = buffer.split(b"\n", 1) + line_decoded = line.decode().rstrip() + output_lines.append(line_decoded) + logger.info("[hook output] %s", line_decoded) + # Handle any remaining data without newline + if buffer: + line_decoded = buffer.decode().rstrip() + if line_decoded: + output_lines.append(line_decoded) + logger.info("[hook output] %s", line_decoded) + + # Use move_on_after for timeout + with anyio.move_on_after(timeout) as 
cancel_scope: + await read_output() + await process.wait() - # Check if hook succeeded (exit code 0) - if process.returncode == 0: + if cancel_scope.cancelled_caught: + timed_out = True + error_msg = f"Hook timed out after {timeout} seconds" + logger.error(error_msg) + # Terminate the process + process.terminate() + # Give it a moment to terminate gracefully + with anyio.move_on_after(5): + await process.wait() + # Force kill if still running + if process.returncode is None: + process.kill() + + elif process.returncode == 0: logger.info("Hook executed successfully") return - - # Non-zero exit code is a failure - error_msg = f"Hook failed with exit code {process.returncode}" - - except asyncio.TimeoutError as e: - error_msg = f"Hook timed out after {timeout} seconds" - cause = e - logger.error(error_msg) - try: - # Attempt to gracefully terminate the process - process.terminate() - await asyncio.wait_for(process.wait(), timeout=5) - except asyncio.TimeoutError: - # Force kill if it didn't terminate in time - process.kill() - await process.wait() + else: + error_msg = f"Hook failed with exit code {process.returncode}" except Exception as e: error_msg = f"Error executing hook: {e}" @@ -214,6 +225,9 @@ async def read_output(): # Handle failure if one occurred if error_msg is not None: + # For timeout, create a TimeoutError as the cause + if timed_out and cause is None: + cause = TimeoutError(error_msg) self._handle_hook_failure(error_msg, on_failure, hook_type, cause) async def execute_before_lease_hook(self, lease_scope: "LeaseContext") -> None: @@ -278,7 +292,19 @@ async def run_before_lease_hook( """ try: # Wait for lease scope to be fully populated by handle_lease - assert lease_scope.is_ready(), "LeaseScope must be fully initialized before running before-lease hooks" + # This is necessary because handle_lease and run_before_lease_hook run concurrently + timeout = 30 # seconds + interval = 0.1 # seconds + elapsed = 0.0 + while not lease_scope.is_ready(): + if 
elapsed >= timeout: + error_msg = "Timeout waiting for lease scope to be ready" + logger.error(error_msg) + await report_status(ExporterStatus.BEFORE_LEASE_HOOK_FAILED, error_msg) + lease_scope.before_lease_hook.set() + return + await anyio.sleep(interval) + elapsed += interval # Check if hook is configured if not self.config.before_lease: @@ -351,8 +377,12 @@ async def run_after_lease_hook( shutdown: Callback to trigger exporter shutdown on critical failures """ try: - # Verify lease scope is ready - assert lease_scope.is_ready(), "LeaseScope must be fully initialized before running after-lease hooks" + # Verify lease scope is ready - for after-lease this should always be true + # since we've already processed the lease, but check defensively + if not lease_scope.is_ready(): + logger.warning("LeaseScope not ready for after-lease hook, skipping") + await report_status(ExporterStatus.AVAILABLE, "Available for new lease") + return # Check if hook is configured if not self.config.after_lease: From 7ca62112bd961f798787c369611eee036cb7be55 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Mon, 5 Jan 2026 22:33:57 -0500 Subject: [PATCH 26/30] Fix exit on hook failure and exit code handling --- .../jumpstarter-cli/jumpstarter_cli/run.py | 15 ++++++++++-- .../jumpstarter/exporter/exporter.py | 23 ++++++++++++++++++- .../jumpstarter/jumpstarter/exporter/hooks.py | 14 ++++++----- 3 files changed, 43 insertions(+), 9 deletions(-) diff --git a/python/packages/jumpstarter-cli/jumpstarter_cli/run.py b/python/packages/jumpstarter-cli/jumpstarter_cli/run.py index 50f9606e..3650b63a 100644 --- a/python/packages/jumpstarter-cli/jumpstarter_cli/run.py +++ b/python/packages/jumpstarter-cli/jumpstarter_cli/run.py @@ -76,11 +76,22 @@ async def signal_handler(): except* Exception as excgroup: _handle_exporter_exceptions(excgroup) + # Check if exporter set an exit code (e.g., from hook failure with on_failure='exit') + exporter_exit_code = exporter.exit_code + # Cancel the signal handler 
after exporter completes signal_tg.cancel_scope.cancel() - # Return signal number if received, otherwise 0 for immediate restart - return received_signal if received_signal else 0 + # Return exit code in priority order: + # 1. Signal number if received (for signal-based termination) + # 2. Exporter's exit code if set (for hook failure with on_failure='exit') + # 3. 0 for immediate restart (normal exit without signal or explicit exit code) + if received_signal: + return received_signal + elif exporter_exit_code is not None: + return exporter_exit_code + else: + return 0 sys.exit(anyio.run(serve_with_graceful_shutdown)) diff --git a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py index ed11e6f2..15e8c37d 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py @@ -139,6 +139,14 @@ class Exporter(AsyncContextManagerMixin, Metadata): determine when to trigger before-lease and after-lease hooks. """ + _exit_code: int | None = field(init=False, default=None) + """Exit code to use when the exporter shuts down. + + When set to a non-zero value, the exporter should terminate permanently + (not restart). This is used by hooks with on_failure='exit' to signal + that the exporter should shut down and not be restarted by the CLI. + """ + _lease_context: LeaseContext | None = field(init=False, default=None) """Encapsulates all resources associated with the current lease. @@ -157,13 +165,17 @@ class Exporter(AsyncContextManagerMixin, Metadata): a reference holder and doesn't manage resource lifecycles directly. """ - def stop(self, wait_for_lease_exit=False, should_unregister=False): + def stop(self, wait_for_lease_exit=False, should_unregister=False, exit_code: int | None = None): """Signal the exporter to stop. Args: wait_for_lease_exit (bool): If True, wait for the current lease to exit before stopping. 
should_unregister (bool): If True, unregister from controller. Otherwise rely on heartbeat. + exit_code (int | None): If set, the exporter will exit with this code (non-zero means no restart). """ + # Set exit code if provided + if exit_code is not None: + self._exit_code = exit_code # Stop immediately if not started yet or if immediate stop is requested if (not self._started or not wait_for_lease_exit) and self._tg is not None: @@ -178,6 +190,15 @@ def stop(self, wait_for_lease_exit=False, should_unregister=False): self._stop_requested = True logger.info("Exporter marked for stop upon lease exit") + @property + def exit_code(self) -> int | None: + """Get the exit code for the exporter. + + Returns: + The exit code if set, or None if the exporter should restart. + """ + return self._exit_code + async def _get_controller_stub(self) -> jumpstarter_pb2_grpc.ControllerServiceStub: """Create and return a controller service stub.""" return jumpstarter_pb2_grpc.ControllerServiceStub(await self.channel_factory()) diff --git a/python/packages/jumpstarter/jumpstarter/exporter/hooks.py b/python/packages/jumpstarter/jumpstarter/exporter/hooks.py index 1f37ace2..d3ec0e36 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/hooks.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/hooks.py @@ -274,7 +274,7 @@ async def run_before_lease_hook( self, lease_scope: "LeaseContext", report_status: Callable[["ExporterStatus", str], Awaitable[None]], - shutdown: Callable[[], None], + shutdown: Callable[..., None], ) -> None: """Execute before-lease hook with full orchestration. 
@@ -288,7 +288,7 @@ async def run_before_lease_hook( Args: lease_scope: LeaseScope containing session, socket_path, and sync event report_status: Async callback to report status changes to controller - shutdown: Callback to trigger exporter shutdown on critical failures + shutdown: Callback to trigger exporter shutdown (accepts optional exit_code kwarg) """ try: # Wait for lease scope to be fully populated by handle_lease @@ -334,7 +334,8 @@ async def run_before_lease_hook( f"beforeLease hook failed (on_failure=exit, shutting down): {e}", ) logger.error("Shutting down exporter due to beforeLease hook failure with on_failure='exit'") - shutdown() + # Exit code 1 tells the CLI not to restart the exporter + shutdown(exit_code=1) else: # on_failure='endLease' - just block this lease, exporter stays available logger.error("beforeLease hook failed with on_failure='endLease': %s", e) @@ -360,7 +361,7 @@ async def run_after_lease_hook( self, lease_scope: "LeaseContext", report_status: Callable[["ExporterStatus", str], Awaitable[None]], - shutdown: Callable[[], None], + shutdown: Callable[..., None], ) -> None: """Execute after-lease hook with full orchestration. 
@@ -374,7 +375,7 @@ async def run_after_lease_hook( Args: lease_scope: LeaseScope containing session, socket_path, and client info report_status: Async callback to report status changes to controller - shutdown: Callback to trigger exporter shutdown on critical failures + shutdown: Callback to trigger exporter shutdown (accepts optional exit_code kwarg) """ try: # Verify lease scope is ready - for after-lease this should always be true @@ -412,7 +413,8 @@ async def run_after_lease_hook( f"afterLease hook failed (on_failure=exit, shutting down): {e}", ) logger.error("Shutting down exporter due to afterLease hook failure with on_failure='exit'") - shutdown() + # Exit code 1 tells the CLI not to restart the exporter + shutdown(exit_code=1) else: # on_failure='endLease' - lease already ended, just report the failure # The exporter remains available for new leases From e567fc16e45c41b17409c81d83932dfdbebf928c Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Mon, 5 Jan 2026 22:58:01 -0500 Subject: [PATCH 27/30] Enable executing j commands within hooks --- .../jumpstarter/jumpstarter/client/core.py | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/python/packages/jumpstarter/jumpstarter/client/core.py b/python/packages/jumpstarter/jumpstarter/client/core.py index 4f859f23..85a7267e 100644 --- a/python/packages/jumpstarter/jumpstarter/client/core.py +++ b/python/packages/jumpstarter/jumpstarter/client/core.py @@ -85,12 +85,24 @@ def __post_init__(self): self.logger.addHandler(handler) async def check_exporter_status(self): - """Check if the exporter is ready to accept driver calls""" + """Check if the exporter is ready to accept driver calls. + + Allows driver commands during hook execution (BEFORE_LEASE_HOOK, AFTER_LEASE_HOOK) + in addition to the normal LEASE_READY status. This enables hooks to interact + with drivers via the `j` CLI for automation use cases. 
+ """ + # Statuses that allow driver commands + ALLOWED_STATUSES = { + ExporterStatus.LEASE_READY, + ExporterStatus.BEFORE_LEASE_HOOK, + ExporterStatus.AFTER_LEASE_HOOK, + } + try: response = await self.stub.GetStatus(jumpstarter_pb2.GetStatusRequest()) status = ExporterStatus.from_proto(response.status) - if status != ExporterStatus.LEASE_READY: + if status not in ALLOWED_STATUSES: raise ExporterNotReady(f"Exporter status is {status}: {response.message}") except AioRpcError as e: From 419ef9d841bcc6c3736f2cd74d5b8bfe562a3f69 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Thu, 15 Jan 2026 08:28:27 -0500 Subject: [PATCH 28/30] Add post-lease hook streaming --- .../jumpstarter-cli/jumpstarter_cli/shell.py | 123 +++++++++++++----- .../jumpstarter/client/v1/client_pb2.py | 54 ++++---- .../jumpstarter/client/v1/client_pb2.pyi | 5 +- .../jumpstarter/v1/jumpstarter_pb2.py | 14 +- .../jumpstarter/v1/jumpstarter_pb2.pyi | 30 +++++ .../jumpstarter/v1/jumpstarter_pb2_grpc.py | 46 +++++++ .../jumpstarter/v1/jumpstarter_pb2_grpc.pyi | 47 ++++++- .../jumpstarter/jumpstarter/client/core.py | 48 +++++-- .../jumpstarter/exporter/exporter.py | 45 ++++++- .../jumpstarter/jumpstarter/exporter/hooks.py | 114 ++++++++-------- .../jumpstarter/exporter/lease_context.py | 4 + .../jumpstarter/exporter/session.py | 36 +++++ 12 files changed, 438 insertions(+), 128 deletions(-) diff --git a/python/packages/jumpstarter-cli/jumpstarter_cli/shell.py b/python/packages/jumpstarter-cli/jumpstarter_cli/shell.py index 41155a4f..e83d4146 100644 --- a/python/packages/jumpstarter-cli/jumpstarter_cli/shell.py +++ b/python/packages/jumpstarter-cli/jumpstarter_cli/shell.py @@ -1,3 +1,4 @@ +import logging import sys from datetime import timedelta @@ -6,11 +7,6 @@ from anyio import create_task_group, get_cancelled_exc_class from jumpstarter_cli_common.config import opt_config from jumpstarter_cli_common.exceptions import handle_exceptions_with_reauthentication -from jumpstarter_cli_common.oidc import ( - 
TOKEN_EXPIRY_WARNING_SECONDS, - format_duration, - get_token_remaining_seconds, -) from jumpstarter_cli_common.signal import signal_handler from .common import opt_acquisition_timeout, opt_duration_partial, opt_selector @@ -19,6 +15,16 @@ from jumpstarter.config.client import ClientConfigV1Alpha1 from jumpstarter.config.exporter import ExporterConfigV1Alpha1 +logger = logging.getLogger(__name__) + + +def _run_shell_only(lease, config, command, path: str) -> int: + """Run just the shell command without log streaming.""" + return launch_shell( + path, lease.exporter_name, config.drivers.allow, config.drivers.unsafe, + config.shell.use_profiles, command=command, lease=lease + ) + def _warn_about_expired_token(lease_name: str, selector: str) -> None: """Warn user that lease won't be cleaned up due to expired token.""" @@ -29,6 +35,16 @@ def _warn_about_expired_token(lease_name: str, selector: str) -> None: async def _monitor_token_expiry(config, cancel_scope) -> None: """Monitor token expiry and warn user.""" + try: + from jumpstarter_cli_common.oidc import ( + TOKEN_EXPIRY_WARNING_SECONDS, + format_duration, + get_token_remaining_seconds, + ) + except ImportError: + # OIDC support not available + return + token = getattr(config, "token", None) if not token: return @@ -62,27 +78,67 @@ async def _monitor_token_expiry(config, cancel_scope) -> None: def _run_shell_with_lease(lease, exporter_logs, config, command): - """Run shell with lease context managers.""" - - def launch_remote_shell(path: str) -> int: - return launch_shell( - path, - lease.exporter_name, - config.drivers.allow, - config.drivers.unsafe, - config.shell.use_profiles, - command=command, - lease=lease, - ) - + """Run shell with lease context managers (no afterLease hook waiting).""" with lease.serve_unix() as path: with lease.monitor(): if exporter_logs: with lease.connect() as client: with client.log_stream(): - return launch_remote_shell(path) + return _run_shell_only(lease, config, command, path) + 
else: + return _run_shell_only(lease, config, command, path) + + +async def _run_shell_with_lease_async(lease, exporter_logs, config, command, cancel_scope): + """Run shell with lease context managers and wait for afterLease hook if logs enabled. + + When exporter_logs is enabled, this function will: + 1. Run the shell command + 2. After shell exits, call EndSession to trigger and wait for afterLease hook + 3. Logs stream to client during hook execution + 4. Release the lease after hook completes + + If Ctrl+C is pressed during EndSession, the wait is skipped but the lease is still released. + """ + from contextlib import ExitStack + + from jumpstarter.client.client import client_from_path + + async with lease.serve_unix_async() as path: + async with lease.monitor_async(): + if exporter_logs: + # Use ExitStack for the client (required by client_from_path) + with ExitStack() as stack: + async with client_from_path( + path, lease.portal, stack, allow=lease.allow, unsafe=lease.unsafe + ) as client: + async with client.log_stream_async(): + # Run the shell command + exit_code = await anyio.to_thread.run_sync( + _run_shell_only, lease, config, command, path + ) + + # Shell has exited. 
Call EndSession to trigger afterLease hook + # while keeping log stream open to receive hook logs + if lease.name and not cancel_scope.cancel_called: + logger.info("Running afterLease hook (Ctrl+C to skip)...") + try: + # EndSession triggers the afterLease hook and waits for completion + # Logs are streamed to us during hook execution + success = await client.end_session_async() + if success: + logger.debug("EndSession completed successfully") + else: + logger.debug("EndSession not implemented, skipping hook wait") + except Exception as e: + logger.warning("Error during EndSession: %s", e) + + return exit_code else: - return launch_remote_shell(path) + exit_code = await anyio.to_thread.run_sync( + _run_shell_only, lease, config, command, path + ) + return exit_code async def _shell_with_signal_handling( @@ -96,10 +152,14 @@ async def _shell_with_signal_handling( # Check token before starting token = getattr(config, "token", None) if token: - remaining = get_token_remaining_seconds(token) - if remaining is not None and remaining <= 0: - from jumpstarter.common.exceptions import ConnectionError - raise ConnectionError("token is expired") + try: + from jumpstarter_cli_common.oidc import get_token_remaining_seconds + remaining = get_token_remaining_seconds(token) + if remaining is not None and remaining <= 0: + from jumpstarter.common.exceptions import ConnectionError + raise ConnectionError("token is expired") + except ImportError: + pass async with create_task_group() as tg: tg.start_soon(signal_handler, tg.cancel_scope) @@ -113,8 +173,8 @@ async def _shell_with_signal_handling( # Start token monitoring only once we're in the shell tg.start_soon(_monitor_token_expiry, config, tg.cancel_scope) - exit_code = await anyio.to_thread.run_sync( - _run_shell_with_lease, lease, exporter_logs, config, command + exit_code = await _run_shell_with_lease_async( + lease, exporter_logs, config, command, tg.cancel_scope ) except BaseExceptionGroup as eg: for exc in eg.exceptions: @@ 
-125,10 +185,13 @@ async def _shell_with_signal_handling( # Check if cancellation was due to token expiry token = getattr(config, "token", None) if lease_used and token: - remaining = get_token_remaining_seconds(token) - if remaining is not None and remaining <= 0: - _warn_about_expired_token(lease_used.name, selector) - return 3 # Exit code for token expiry + try: + from jumpstarter_cli_common.oidc import get_token_remaining_seconds + remaining = get_token_remaining_seconds(token) + if remaining is not None and remaining <= 0: + _warn_about_expired_token(lease_used.name, selector) + except ImportError: + pass exit_code = 2 finally: if not tg.cancel_scope.cancel_called: diff --git a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2.py b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2.py index 8cc53073..cd9f2c93 100644 --- a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2.py +++ b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2.py @@ -34,7 +34,7 @@ from ...v1 import common_pb2 as jumpstarter_dot_v1_dot_common__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"jumpstarter/client/v1/client.proto\x12\x15jumpstarter.client.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1fjumpstarter/v1/kubernetes.proto\x1a\x1bjumpstarter/v1/common.proto\"\xe0\x02\n\x08\x45xporter\x12\x17\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x08R\x04name\x12\x43\n\x06labels\x18\x02 \x03(\x0b\x32+.jumpstarter.client.v1.Exporter.LabelsEntryR\x06labels\x12\x1d\n\x06online\x18\x03 \x01(\x08\x42\x05\x18\x01\xe0\x41\x03R\x06online\x12;\n\x06status\x18\x04 
\x01(\x0e\x32\x1e.jumpstarter.v1.ExporterStatusB\x03\xe0\x41\x03R\x06status\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01:_\xea\x41\\\n\x18jumpstarter.dev/Exporter\x12+namespaces/{namespace}/exporters/{exporter}*\texporters2\x08\x65xporter\"\xfa\x06\n\x05Lease\x12\x17\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x08R\x04name\x12\"\n\x08selector\x18\x02 \x01(\tB\x06\xe0\x41\x02\xe0\x41\x05R\x08selector\x12:\n\x08\x64uration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationH\x00R\x08\x64uration\x88\x01\x01\x12M\n\x12\x65\x66\x66\x65\x63tive_duration\x18\x04 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x03R\x11\x65\x66\x66\x65\x63tiveDuration\x12>\n\nbegin_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x01R\tbeginTime\x88\x01\x01\x12V\n\x14\x65\x66\x66\x65\x63tive_begin_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03H\x02R\x12\x65\x66\x66\x65\x63tiveBeginTime\x88\x01\x01\x12:\n\x08\x65nd_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x03R\x07\x65ndTime\x88\x01\x01\x12R\n\x12\x65\x66\x66\x65\x63tive_end_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03H\x04R\x10\x65\x66\x66\x65\x63tiveEndTime\x88\x01\x01\x12;\n\x06\x63lient\x18\t \x01(\tB\x1e\xe0\x41\x03\xfa\x41\x18\n\x16jumpstarter.dev/ClientH\x05R\x06\x63lient\x88\x01\x01\x12\x41\n\x08\x65xporter\x18\n \x01(\tB \xe0\x41\x03\xfa\x41\x1a\n\x18jumpstarter.dev/ExporterH\x06R\x08\x65xporter\x88\x01\x01\x12>\n\nconditions\x18\x0b \x03(\x0b\x32\x19.jumpstarter.v1.ConditionB\x03\xe0\x41\x03R\nconditions:P\xea\x41M\n\x15jumpstarter.dev/Lease\x12%namespaces/{namespace}/leases/{lease}*\x06leases2\x05leaseB\x0b\n\t_durationB\r\n\x0b_begin_timeB\x17\n\x15_effective_begin_timeB\x0b\n\t_end_timeB\x15\n\x13_effective_end_timeB\t\n\x07_clientB\x0b\n\t_exporter\"J\n\x12GetExporterRequest\x12\x34\n\x04name\x18\x01 \x01(\tB 
\xe0\x41\x02\xfa\x41\x1a\n\x18jumpstarter.dev/ExporterR\x04name\"\xb3\x01\n\x14ListExportersRequest\x12\x38\n\x06parent\x18\x01 \x01(\tB \xe0\x41\x02\xfa\x41\x1a\x12\x18jumpstarter.dev/ExporterR\x06parent\x12 \n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01R\x08pageSize\x12\"\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01R\tpageToken\x12\x1b\n\x06\x66ilter\x18\x04 \x01(\tB\x03\xe0\x41\x01R\x06\x66ilter\"~\n\x15ListExportersResponse\x12=\n\texporters\x18\x01 \x03(\x0b\x32\x1f.jumpstarter.client.v1.ExporterR\texporters\x12&\n\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken\"D\n\x0fGetLeaseRequest\x12\x31\n\x04name\x18\x01 \x01(\tB\x1d\xe0\x41\x02\xfa\x41\x17\n\x15jumpstarter.dev/LeaseR\x04name\"\xe8\x01\n\x11ListLeasesRequest\x12\x35\n\x06parent\x18\x01 \x01(\tB\x1d\xe0\x41\x02\xfa\x41\x17\x12\x15jumpstarter.dev/LeaseR\x06parent\x12 \n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01R\x08pageSize\x12\"\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01R\tpageToken\x12\x1b\n\x06\x66ilter\x18\x04 \x01(\tB\x03\xe0\x41\x01R\x06\x66ilter\x12)\n\x0bonly_active\x18\x05 \x01(\x08\x42\x03\xe0\x41\x01H\x00R\nonlyActive\x88\x01\x01\x42\x0e\n\x0c_only_active\"r\n\x12ListLeasesResponse\x12\x34\n\x06leases\x18\x01 \x03(\x0b\x32\x1c.jumpstarter.client.v1.LeaseR\x06leases\x12&\n\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken\"\xa4\x01\n\x12\x43reateLeaseRequest\x12\x35\n\x06parent\x18\x01 \x01(\tB\x1d\xe0\x41\x02\xfa\x41\x17\x12\x15jumpstarter.dev/LeaseR\x06parent\x12\x1e\n\x08lease_id\x18\x02 \x01(\tB\x03\xe0\x41\x01R\x07leaseId\x12\x37\n\x05lease\x18\x03 \x01(\x0b\x32\x1c.jumpstarter.client.v1.LeaseB\x03\xe0\x41\x02R\x05lease\"\x8f\x01\n\x12UpdateLeaseRequest\x12\x37\n\x05lease\x18\x01 \x01(\x0b\x32\x1c.jumpstarter.client.v1.LeaseB\x03\xe0\x41\x02R\x05lease\x12@\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x01R\nupdateMask\"G\n\x12\x44\x65leteLeaseRequest\x12\x31\n\x04name\x18\x01 
\x01(\tB\x1d\xe0\x41\x02\xfa\x41\x17\n\x15jumpstarter.dev/LeaseR\x04name2\xa7\x08\n\rClientService\x12\x8d\x01\n\x0bGetExporter\x12).jumpstarter.client.v1.GetExporterRequest\x1a\x1f.jumpstarter.client.v1.Exporter\"2\xda\x41\x04name\x82\xd3\xe4\x93\x02%\x12#/v1/{name=namespaces/*/exporters/*}\x12\xa0\x01\n\rListExporters\x12+.jumpstarter.client.v1.ListExportersRequest\x1a,.jumpstarter.client.v1.ListExportersResponse\"4\xda\x41\x06parent\x82\xd3\xe4\x93\x02%\x12#/v1/{parent=namespaces/*}/exporters\x12\x81\x01\n\x08GetLease\x12&.jumpstarter.client.v1.GetLeaseRequest\x1a\x1c.jumpstarter.client.v1.Lease\"/\xda\x41\x04name\x82\xd3\xe4\x93\x02\"\x12 /v1/{name=namespaces/*/leases/*}\x12\x94\x01\n\nListLeases\x12(.jumpstarter.client.v1.ListLeasesRequest\x1a).jumpstarter.client.v1.ListLeasesResponse\"1\xda\x41\x06parent\x82\xd3\xe4\x93\x02\"\x12 /v1/{parent=namespaces/*}/leases\x12\x9f\x01\n\x0b\x43reateLease\x12).jumpstarter.client.v1.CreateLeaseRequest\x1a\x1c.jumpstarter.client.v1.Lease\"G\xda\x41\x15parent,lease,lease_id\x82\xd3\xe4\x93\x02)\" /v1/{parent=namespaces/*}/leases:\x05lease\x12\xa1\x01\n\x0bUpdateLease\x12).jumpstarter.client.v1.UpdateLeaseRequest\x1a\x1c.jumpstarter.client.v1.Lease\"I\xda\x41\x11lease,update_mask\x82\xd3\xe4\x93\x02/2&/v1/{lease.name=namespaces/*/leases/*}:\x05lease\x12\x81\x01\n\x0b\x44\x65leteLease\x12).jumpstarter.client.v1.DeleteLeaseRequest\x1a\x16.google.protobuf.Empty\"/\xda\x41\x04name\x82\xd3\xe4\x93\x02\"* /v1/{name=namespaces/*/leases/*}B\x9e\x01\n\x19\x63om.jumpstarter.client.v1B\x0b\x43lientProtoP\x01\xa2\x02\x03JCX\xaa\x02\x15Jumpstarter.Client.V1\xca\x02\x15Jumpstarter\\Client\\V1\xe2\x02!Jumpstarter\\Client\\V1\\GPBMetadata\xea\x02\x17Jumpstarter::Client::V1b\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\"jumpstarter/client/v1/client.proto\x12\x15jumpstarter.client.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1fjumpstarter/v1/kubernetes.proto\x1a\x1bjumpstarter/v1/common.proto\"\x8c\x03\n\x08\x45xporter\x12\x17\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x08R\x04name\x12\x43\n\x06labels\x18\x02 \x03(\x0b\x32+.jumpstarter.client.v1.Exporter.LabelsEntryR\x06labels\x12\x1d\n\x06online\x18\x03 \x01(\x08\x42\x05\x18\x01\xe0\x41\x03R\x06online\x12;\n\x06status\x18\x04 \x01(\x0e\x32\x1e.jumpstarter.v1.ExporterStatusB\x03\xe0\x41\x03R\x06status\x12*\n\x0estatus_message\x18\x05 \x01(\tB\x03\xe0\x41\x03R\rstatusMessage\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01:_\xea\x41\\\n\x18jumpstarter.dev/Exporter\x12+namespaces/{namespace}/exporters/{exporter}*\texporters2\x08\x65xporter\"\xfa\x06\n\x05Lease\x12\x17\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x08R\x04name\x12\"\n\x08selector\x18\x02 \x01(\tB\x06\xe0\x41\x02\xe0\x41\x05R\x08selector\x12:\n\x08\x64uration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationH\x00R\x08\x64uration\x88\x01\x01\x12M\n\x12\x65\x66\x66\x65\x63tive_duration\x18\x04 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x03R\x11\x65\x66\x66\x65\x63tiveDuration\x12>\n\nbegin_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x01R\tbeginTime\x88\x01\x01\x12V\n\x14\x65\x66\x66\x65\x63tive_begin_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03H\x02R\x12\x65\x66\x66\x65\x63tiveBeginTime\x88\x01\x01\x12:\n\x08\x65nd_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x03R\x07\x65ndTime\x88\x01\x01\x12R\n\x12\x65\x66\x66\x65\x63tive_end_time\x18\x08 
\x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03H\x04R\x10\x65\x66\x66\x65\x63tiveEndTime\x88\x01\x01\x12;\n\x06\x63lient\x18\t \x01(\tB\x1e\xe0\x41\x03\xfa\x41\x18\n\x16jumpstarter.dev/ClientH\x05R\x06\x63lient\x88\x01\x01\x12\x41\n\x08\x65xporter\x18\n \x01(\tB \xe0\x41\x03\xfa\x41\x1a\n\x18jumpstarter.dev/ExporterH\x06R\x08\x65xporter\x88\x01\x01\x12>\n\nconditions\x18\x0b \x03(\x0b\x32\x19.jumpstarter.v1.ConditionB\x03\xe0\x41\x03R\nconditions:P\xea\x41M\n\x15jumpstarter.dev/Lease\x12%namespaces/{namespace}/leases/{lease}*\x06leases2\x05leaseB\x0b\n\t_durationB\r\n\x0b_begin_timeB\x17\n\x15_effective_begin_timeB\x0b\n\t_end_timeB\x15\n\x13_effective_end_timeB\t\n\x07_clientB\x0b\n\t_exporter\"J\n\x12GetExporterRequest\x12\x34\n\x04name\x18\x01 \x01(\tB \xe0\x41\x02\xfa\x41\x1a\n\x18jumpstarter.dev/ExporterR\x04name\"\xb3\x01\n\x14ListExportersRequest\x12\x38\n\x06parent\x18\x01 \x01(\tB \xe0\x41\x02\xfa\x41\x1a\x12\x18jumpstarter.dev/ExporterR\x06parent\x12 \n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01R\x08pageSize\x12\"\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01R\tpageToken\x12\x1b\n\x06\x66ilter\x18\x04 \x01(\tB\x03\xe0\x41\x01R\x06\x66ilter\"~\n\x15ListExportersResponse\x12=\n\texporters\x18\x01 \x03(\x0b\x32\x1f.jumpstarter.client.v1.ExporterR\texporters\x12&\n\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken\"D\n\x0fGetLeaseRequest\x12\x31\n\x04name\x18\x01 \x01(\tB\x1d\xe0\x41\x02\xfa\x41\x17\n\x15jumpstarter.dev/LeaseR\x04name\"\xe8\x01\n\x11ListLeasesRequest\x12\x35\n\x06parent\x18\x01 \x01(\tB\x1d\xe0\x41\x02\xfa\x41\x17\x12\x15jumpstarter.dev/LeaseR\x06parent\x12 \n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01R\x08pageSize\x12\"\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01R\tpageToken\x12\x1b\n\x06\x66ilter\x18\x04 \x01(\tB\x03\xe0\x41\x01R\x06\x66ilter\x12)\n\x0bonly_active\x18\x05 \x01(\x08\x42\x03\xe0\x41\x01H\x00R\nonlyActive\x88\x01\x01\x42\x0e\n\x0c_only_active\"r\n\x12ListLeasesResponse\x12\x34\n\x06leases\x18\x01 
\x03(\x0b\x32\x1c.jumpstarter.client.v1.LeaseR\x06leases\x12&\n\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken\"\xa4\x01\n\x12\x43reateLeaseRequest\x12\x35\n\x06parent\x18\x01 \x01(\tB\x1d\xe0\x41\x02\xfa\x41\x17\x12\x15jumpstarter.dev/LeaseR\x06parent\x12\x1e\n\x08lease_id\x18\x02 \x01(\tB\x03\xe0\x41\x01R\x07leaseId\x12\x37\n\x05lease\x18\x03 \x01(\x0b\x32\x1c.jumpstarter.client.v1.LeaseB\x03\xe0\x41\x02R\x05lease\"\x8f\x01\n\x12UpdateLeaseRequest\x12\x37\n\x05lease\x18\x01 \x01(\x0b\x32\x1c.jumpstarter.client.v1.LeaseB\x03\xe0\x41\x02R\x05lease\x12@\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x01R\nupdateMask\"G\n\x12\x44\x65leteLeaseRequest\x12\x31\n\x04name\x18\x01 \x01(\tB\x1d\xe0\x41\x02\xfa\x41\x17\n\x15jumpstarter.dev/LeaseR\x04name2\xa7\x08\n\rClientService\x12\x8d\x01\n\x0bGetExporter\x12).jumpstarter.client.v1.GetExporterRequest\x1a\x1f.jumpstarter.client.v1.Exporter\"2\xda\x41\x04name\x82\xd3\xe4\x93\x02%\x12#/v1/{name=namespaces/*/exporters/*}\x12\xa0\x01\n\rListExporters\x12+.jumpstarter.client.v1.ListExportersRequest\x1a,.jumpstarter.client.v1.ListExportersResponse\"4\xda\x41\x06parent\x82\xd3\xe4\x93\x02%\x12#/v1/{parent=namespaces/*}/exporters\x12\x81\x01\n\x08GetLease\x12&.jumpstarter.client.v1.GetLeaseRequest\x1a\x1c.jumpstarter.client.v1.Lease\"/\xda\x41\x04name\x82\xd3\xe4\x93\x02\"\x12 /v1/{name=namespaces/*/leases/*}\x12\x94\x01\n\nListLeases\x12(.jumpstarter.client.v1.ListLeasesRequest\x1a).jumpstarter.client.v1.ListLeasesResponse\"1\xda\x41\x06parent\x82\xd3\xe4\x93\x02\"\x12 /v1/{parent=namespaces/*}/leases\x12\x9f\x01\n\x0b\x43reateLease\x12).jumpstarter.client.v1.CreateLeaseRequest\x1a\x1c.jumpstarter.client.v1.Lease\"G\xda\x41\x15parent,lease,lease_id\x82\xd3\xe4\x93\x02)\" 
/v1/{parent=namespaces/*}/leases:\x05lease\x12\xa1\x01\n\x0bUpdateLease\x12).jumpstarter.client.v1.UpdateLeaseRequest\x1a\x1c.jumpstarter.client.v1.Lease\"I\xda\x41\x11lease,update_mask\x82\xd3\xe4\x93\x02/2&/v1/{lease.name=namespaces/*/leases/*}:\x05lease\x12\x81\x01\n\x0b\x44\x65leteLease\x12).jumpstarter.client.v1.DeleteLeaseRequest\x1a\x16.google.protobuf.Empty\"/\xda\x41\x04name\x82\xd3\xe4\x93\x02\"* /v1/{name=namespaces/*/leases/*}B\x9e\x01\n\x19\x63om.jumpstarter.client.v1B\x0b\x43lientProtoP\x01\xa2\x02\x03JCX\xaa\x02\x15Jumpstarter.Client.V1\xca\x02\x15Jumpstarter\\Client\\V1\xe2\x02!Jumpstarter\\Client\\V1\\GPBMetadata\xea\x02\x17Jumpstarter::Client::V1b\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -50,6 +50,8 @@ _globals['_EXPORTER'].fields_by_name['online']._serialized_options = b'\030\001\340A\003' _globals['_EXPORTER'].fields_by_name['status']._loaded_options = None _globals['_EXPORTER'].fields_by_name['status']._serialized_options = b'\340A\003' + _globals['_EXPORTER'].fields_by_name['status_message']._loaded_options = None + _globals['_EXPORTER'].fields_by_name['status_message']._serialized_options = b'\340A\003' _globals['_EXPORTER']._loaded_options = None _globals['_EXPORTER']._serialized_options = b'\352A\\\n\030jumpstarter.dev/Exporter\022+namespaces/{namespace}/exporters/{exporter}*\texporters2\010exporter' _globals['_LEASE'].fields_by_name['name']._loaded_options = None @@ -119,29 +121,29 @@ _globals['_CLIENTSERVICE'].methods_by_name['DeleteLease']._loaded_options = None _globals['_CLIENTSERVICE'].methods_by_name['DeleteLease']._serialized_options = b'\332A\004name\202\323\344\223\002\"* /v1/{name=namespaces/*/leases/*}' _globals['_EXPORTER']._serialized_start=367 - _globals['_EXPORTER']._serialized_end=719 - _globals['_EXPORTER_LABELSENTRY']._serialized_start=565 - _globals['_EXPORTER_LABELSENTRY']._serialized_end=622 - _globals['_LEASE']._serialized_start=722 - 
_globals['_LEASE']._serialized_end=1612 - _globals['_GETEXPORTERREQUEST']._serialized_start=1614 - _globals['_GETEXPORTERREQUEST']._serialized_end=1688 - _globals['_LISTEXPORTERSREQUEST']._serialized_start=1691 - _globals['_LISTEXPORTERSREQUEST']._serialized_end=1870 - _globals['_LISTEXPORTERSRESPONSE']._serialized_start=1872 - _globals['_LISTEXPORTERSRESPONSE']._serialized_end=1998 - _globals['_GETLEASEREQUEST']._serialized_start=2000 - _globals['_GETLEASEREQUEST']._serialized_end=2068 - _globals['_LISTLEASESREQUEST']._serialized_start=2071 - _globals['_LISTLEASESREQUEST']._serialized_end=2303 - _globals['_LISTLEASESRESPONSE']._serialized_start=2305 - _globals['_LISTLEASESRESPONSE']._serialized_end=2419 - _globals['_CREATELEASEREQUEST']._serialized_start=2422 - _globals['_CREATELEASEREQUEST']._serialized_end=2586 - _globals['_UPDATELEASEREQUEST']._serialized_start=2589 - _globals['_UPDATELEASEREQUEST']._serialized_end=2732 - _globals['_DELETELEASEREQUEST']._serialized_start=2734 - _globals['_DELETELEASEREQUEST']._serialized_end=2805 - _globals['_CLIENTSERVICE']._serialized_start=2808 - _globals['_CLIENTSERVICE']._serialized_end=3871 + _globals['_EXPORTER']._serialized_end=763 + _globals['_EXPORTER_LABELSENTRY']._serialized_start=609 + _globals['_EXPORTER_LABELSENTRY']._serialized_end=666 + _globals['_LEASE']._serialized_start=766 + _globals['_LEASE']._serialized_end=1656 + _globals['_GETEXPORTERREQUEST']._serialized_start=1658 + _globals['_GETEXPORTERREQUEST']._serialized_end=1732 + _globals['_LISTEXPORTERSREQUEST']._serialized_start=1735 + _globals['_LISTEXPORTERSREQUEST']._serialized_end=1914 + _globals['_LISTEXPORTERSRESPONSE']._serialized_start=1916 + _globals['_LISTEXPORTERSRESPONSE']._serialized_end=2042 + _globals['_GETLEASEREQUEST']._serialized_start=2044 + _globals['_GETLEASEREQUEST']._serialized_end=2112 + _globals['_LISTLEASESREQUEST']._serialized_start=2115 + _globals['_LISTLEASESREQUEST']._serialized_end=2347 + 
_globals['_LISTLEASESRESPONSE']._serialized_start=2349 + _globals['_LISTLEASESRESPONSE']._serialized_end=2463 + _globals['_CREATELEASEREQUEST']._serialized_start=2466 + _globals['_CREATELEASEREQUEST']._serialized_end=2630 + _globals['_UPDATELEASEREQUEST']._serialized_start=2633 + _globals['_UPDATELEASEREQUEST']._serialized_end=2776 + _globals['_DELETELEASEREQUEST']._serialized_start=2778 + _globals['_DELETELEASEREQUEST']._serialized_end=2849 + _globals['_CLIENTSERVICE']._serialized_start=2852 + _globals['_CLIENTSERVICE']._serialized_end=3915 # @@protoc_insertion_point(module_scope) diff --git a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2.pyi b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2.pyi index 500b1379..a8e32290 100644 --- a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2.pyi +++ b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2.pyi @@ -53,9 +53,11 @@ class Exporter(google.protobuf.message.Message): LABELS_FIELD_NUMBER: builtins.int ONLINE_FIELD_NUMBER: builtins.int STATUS_FIELD_NUMBER: builtins.int + STATUS_MESSAGE_FIELD_NUMBER: builtins.int name: builtins.str online: builtins.bool status: jumpstarter.v1.common_pb2.ExporterStatus.ValueType + status_message: builtins.str @property def labels(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... def __init__( @@ -65,8 +67,9 @@ class Exporter(google.protobuf.message.Message): labels: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., online: builtins.bool = ..., status: jumpstarter.v1.common_pb2.ExporterStatus.ValueType = ..., + status_message: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["labels", b"labels", "name", b"name", "online", b"online", "status", b"status"]) -> None: ... 
+ def ClearField(self, field_name: typing.Literal["labels", b"labels", "name", b"name", "online", b"online", "status", b"status", "status_message", b"status_message"]) -> None: ... Global___Exporter: typing_extensions.TypeAlias = Exporter diff --git a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.py b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.py index b735bcad..cf415caa 100644 --- a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.py +++ b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.py @@ -30,7 +30,7 @@ from . import common_pb2 as jumpstarter_dot_v1_dot_common__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n jumpstarter/v1/jumpstarter.proto\x12\x0ejumpstarter.v1\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1fjumpstarter/v1/kubernetes.proto\x1a\x1bjumpstarter/v1/common.proto\"\xd1\x01\n\x0fRegisterRequest\x12\x43\n\x06labels\x18\x01 \x03(\x0b\x32+.jumpstarter.v1.RegisterRequest.LabelsEntryR\x06labels\x12>\n\x07reports\x18\x02 \x03(\x0b\x32$.jumpstarter.v1.DriverInstanceReportR\x07reports\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xd2\x03\n\x14\x44riverInstanceReport\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12$\n\x0bparent_uuid\x18\x02 \x01(\tH\x00R\nparentUuid\x88\x01\x01\x12H\n\x06labels\x18\x03 \x03(\x0b\x32\x30.jumpstarter.v1.DriverInstanceReport.LabelsEntryR\x06labels\x12%\n\x0b\x64\x65scription\x18\x04 \x01(\tH\x01R\x0b\x64\x65scription\x88\x01\x01\x12m\n\x13methods_description\x18\x05 \x03(\x0b\x32<.jumpstarter.v1.DriverInstanceReport.MethodsDescriptionEntryR\x12methodsDescription\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 
\x01(\tR\x05value:\x02\x38\x01\x1a\x45\n\x17MethodsDescriptionEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\x0e\n\x0c_parent_uuidB\x0e\n\x0c_description\"&\n\x10RegisterResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\"+\n\x11UnregisterRequest\x12\x16\n\x06reason\x18\x02 \x01(\tR\x06reason\"\x14\n\x12UnregisterResponse\".\n\rListenRequest\x12\x1d\n\nlease_name\x18\x01 \x01(\tR\tleaseName\"\\\n\x0eListenResponse\x12\'\n\x0frouter_endpoint\x18\x01 \x01(\tR\x0erouterEndpoint\x12!\n\x0crouter_token\x18\x02 \x01(\tR\x0brouterToken\"\x0f\n\rStatusRequest\"\x91\x01\n\x0eStatusResponse\x12\x16\n\x06leased\x18\x01 \x01(\x08R\x06leased\x12\"\n\nlease_name\x18\x02 \x01(\tH\x00R\tleaseName\x88\x01\x01\x12$\n\x0b\x63lient_name\x18\x03 \x01(\tH\x01R\nclientName\x88\x01\x01\x42\r\n\x0b_lease_nameB\x0e\n\x0c_client_name\",\n\x0b\x44ialRequest\x12\x1d\n\nlease_name\x18\x01 \x01(\tR\tleaseName\"Z\n\x0c\x44ialResponse\x12\'\n\x0frouter_endpoint\x18\x01 \x01(\tR\x0erouterEndpoint\x12!\n\x0crouter_token\x18\x02 \x01(\tR\x0brouterToken\"\xa1\x01\n\x12\x41uditStreamRequest\x12#\n\rexporter_uuid\x18\x01 \x01(\tR\x0c\x65xporterUuid\x12\x30\n\x14\x64river_instance_uuid\x18\x02 \x01(\tR\x12\x64riverInstanceUuid\x12\x1a\n\x08severity\x18\x03 \x01(\tR\x08severity\x12\x18\n\x07message\x18\x04 \x01(\tR\x07message\"x\n\x13ReportStatusRequest\x12\x36\n\x06status\x18\x01 \x01(\x0e\x32\x1e.jumpstarter.v1.ExporterStatusR\x06status\x12\x1d\n\x07message\x18\x02 \x01(\tH\x00R\x07message\x88\x01\x01\x42\n\n\x08_message\"\x16\n\x14ReportStatusResponse\"\xb8\x02\n\x11GetReportResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x45\n\x06labels\x18\x02 \x03(\x0b\x32-.jumpstarter.v1.GetReportResponse.LabelsEntryR\x06labels\x12>\n\x07reports\x18\x03 \x03(\x0b\x32$.jumpstarter.v1.DriverInstanceReportR\x07reports\x12M\n\x15\x61lternative_endpoints\x18\x04 
\x03(\x0b\x32\x18.jumpstarter.v1.EndpointR\x14\x61lternativeEndpoints\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xa5\x01\n\x08\x45ndpoint\x12\x1a\n\x08\x65ndpoint\x18\x01 \x01(\tR\x08\x65ndpoint\x12 \n\x0b\x63\x65rtificate\x18\x02 \x01(\tR\x0b\x63\x65rtificate\x12-\n\x12\x63lient_certificate\x18\x03 \x01(\tR\x11\x63lientCertificate\x12,\n\x12\x63lient_private_key\x18\x04 \x01(\tR\x10\x63lientPrivateKey\"k\n\x11\x44riverCallRequest\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x16\n\x06method\x18\x02 \x01(\tR\x06method\x12*\n\x04\x61rgs\x18\x03 \x03(\x0b\x32\x16.google.protobuf.ValueR\x04\x61rgs\"X\n\x12\x44riverCallResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12.\n\x06result\x18\x02 \x01(\x0b\x32\x16.google.protobuf.ValueR\x06result\"t\n\x1aStreamingDriverCallRequest\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x16\n\x06method\x18\x02 \x01(\tR\x06method\x12*\n\x04\x61rgs\x18\x03 \x03(\x0b\x32\x16.google.protobuf.ValueR\x04\x61rgs\"a\n\x1bStreamingDriverCallResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12.\n\x06result\x18\x02 \x01(\x0b\x32\x16.google.protobuf.ValueR\x06result\"\xa0\x01\n\x11LogStreamResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x1a\n\x08severity\x18\x02 \x01(\tR\x08severity\x12\x18\n\x07message\x18\x03 \x01(\tR\x07message\x12\x36\n\x06source\x18\x04 \x01(\x0e\x32\x19.jumpstarter.v1.LogSourceH\x00R\x06source\x88\x01\x01\x42\t\n\x07_source\"\x0e\n\x0cResetRequest\"\x0f\n\rResetResponse\"%\n\x0fGetLeaseRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"\x93\x03\n\x10GetLeaseResponse\x12\x35\n\x08\x64uration\x18\x01 \x01(\x0b\x32\x19.google.protobuf.DurationR\x08\x64uration\x12\x39\n\x08selector\x18\x02 \x01(\x0b\x32\x1d.jumpstarter.v1.LabelSelectorR\x08selector\x12>\n\nbegin_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00R\tbeginTime\x88\x01\x01\x12:\n\x08\x65nd_time\x18\x04 
\x01(\x0b\x32\x1a.google.protobuf.TimestampH\x01R\x07\x65ndTime\x88\x01\x01\x12(\n\rexporter_uuid\x18\x05 \x01(\tH\x02R\x0c\x65xporterUuid\x88\x01\x01\x12\x39\n\nconditions\x18\x06 \x03(\x0b\x32\x19.jumpstarter.v1.ConditionR\nconditionsB\r\n\x0b_begin_timeB\x0b\n\t_end_timeB\x10\n\x0e_exporter_uuid\"\x87\x01\n\x13RequestLeaseRequest\x12\x35\n\x08\x64uration\x18\x01 \x01(\x0b\x32\x19.google.protobuf.DurationR\x08\x64uration\x12\x39\n\x08selector\x18\x02 \x01(\x0b\x32\x1d.jumpstarter.v1.LabelSelectorR\x08selector\"*\n\x14RequestLeaseResponse\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\")\n\x13ReleaseLeaseRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"\x16\n\x14ReleaseLeaseResponse\"\x13\n\x11ListLeasesRequest\"*\n\x12ListLeasesResponse\x12\x14\n\x05names\x18\x01 \x03(\tR\x05names\"\x12\n\x10GetStatusRequest\"v\n\x11GetStatusResponse\x12\x36\n\x06status\x18\x01 \x01(\x0e\x32\x1e.jumpstarter.v1.ExporterStatusR\x06status\x12\x1d\n\x07message\x18\x02 \x01(\tH\x00R\x07message\x88\x01\x01\x42\n\n\x08_message2\x92\x07\n\x11\x43ontrollerService\x12M\n\x08Register\x12\x1f.jumpstarter.v1.RegisterRequest\x1a .jumpstarter.v1.RegisterResponse\x12S\n\nUnregister\x12!.jumpstarter.v1.UnregisterRequest\x1a\".jumpstarter.v1.UnregisterResponse\x12Y\n\x0cReportStatus\x12#.jumpstarter.v1.ReportStatusRequest\x1a$.jumpstarter.v1.ReportStatusResponse\x12I\n\x06Listen\x12\x1d.jumpstarter.v1.ListenRequest\x1a\x1e.jumpstarter.v1.ListenResponse0\x01\x12I\n\x06Status\x12\x1d.jumpstarter.v1.StatusRequest\x1a\x1e.jumpstarter.v1.StatusResponse0\x01\x12\x41\n\x04\x44ial\x12\x1b.jumpstarter.v1.DialRequest\x1a\x1c.jumpstarter.v1.DialResponse\x12K\n\x0b\x41uditStream\x12\".jumpstarter.v1.AuditStreamRequest\x1a\x16.google.protobuf.Empty(\x01\x12M\n\x08GetLease\x12\x1f.jumpstarter.v1.GetLeaseRequest\x1a 
.jumpstarter.v1.GetLeaseResponse\x12Y\n\x0cRequestLease\x12#.jumpstarter.v1.RequestLeaseRequest\x1a$.jumpstarter.v1.RequestLeaseResponse\x12Y\n\x0cReleaseLease\x12#.jumpstarter.v1.ReleaseLeaseRequest\x1a$.jumpstarter.v1.ReleaseLeaseResponse\x12S\n\nListLeases\x12!.jumpstarter.v1.ListLeasesRequest\x1a\".jumpstarter.v1.ListLeasesResponse2\x82\x04\n\x0f\x45xporterService\x12\x46\n\tGetReport\x12\x16.google.protobuf.Empty\x1a!.jumpstarter.v1.GetReportResponse\x12S\n\nDriverCall\x12!.jumpstarter.v1.DriverCallRequest\x1a\".jumpstarter.v1.DriverCallResponse\x12p\n\x13StreamingDriverCall\x12*.jumpstarter.v1.StreamingDriverCallRequest\x1a+.jumpstarter.v1.StreamingDriverCallResponse0\x01\x12H\n\tLogStream\x12\x16.google.protobuf.Empty\x1a!.jumpstarter.v1.LogStreamResponse0\x01\x12\x44\n\x05Reset\x12\x1c.jumpstarter.v1.ResetRequest\x1a\x1d.jumpstarter.v1.ResetResponse\x12P\n\tGetStatus\x12 .jumpstarter.v1.GetStatusRequest\x1a!.jumpstarter.v1.GetStatusResponseB\x7f\n\x12\x63om.jumpstarter.v1B\x10JumpstarterProtoP\x01\xa2\x02\x03JXX\xaa\x02\x0eJumpstarter.V1\xca\x02\x0eJumpstarter\\V1\xe2\x02\x1aJumpstarter\\V1\\GPBMetadata\xea\x02\x0fJumpstarter::V1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n jumpstarter/v1/jumpstarter.proto\x12\x0ejumpstarter.v1\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1fjumpstarter/v1/kubernetes.proto\x1a\x1bjumpstarter/v1/common.proto\"\xd1\x01\n\x0fRegisterRequest\x12\x43\n\x06labels\x18\x01 \x03(\x0b\x32+.jumpstarter.v1.RegisterRequest.LabelsEntryR\x06labels\x12>\n\x07reports\x18\x02 \x03(\x0b\x32$.jumpstarter.v1.DriverInstanceReportR\x07reports\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xd2\x03\n\x14\x44riverInstanceReport\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12$\n\x0bparent_uuid\x18\x02 
\x01(\tH\x00R\nparentUuid\x88\x01\x01\x12H\n\x06labels\x18\x03 \x03(\x0b\x32\x30.jumpstarter.v1.DriverInstanceReport.LabelsEntryR\x06labels\x12%\n\x0b\x64\x65scription\x18\x04 \x01(\tH\x01R\x0b\x64\x65scription\x88\x01\x01\x12m\n\x13methods_description\x18\x05 \x03(\x0b\x32<.jumpstarter.v1.DriverInstanceReport.MethodsDescriptionEntryR\x12methodsDescription\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1a\x45\n\x17MethodsDescriptionEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\x0e\n\x0c_parent_uuidB\x0e\n\x0c_description\"&\n\x10RegisterResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\"+\n\x11UnregisterRequest\x12\x16\n\x06reason\x18\x02 \x01(\tR\x06reason\"\x14\n\x12UnregisterResponse\".\n\rListenRequest\x12\x1d\n\nlease_name\x18\x01 \x01(\tR\tleaseName\"\\\n\x0eListenResponse\x12\'\n\x0frouter_endpoint\x18\x01 \x01(\tR\x0erouterEndpoint\x12!\n\x0crouter_token\x18\x02 \x01(\tR\x0brouterToken\"\x0f\n\rStatusRequest\"\x91\x01\n\x0eStatusResponse\x12\x16\n\x06leased\x18\x01 \x01(\x08R\x06leased\x12\"\n\nlease_name\x18\x02 \x01(\tH\x00R\tleaseName\x88\x01\x01\x12$\n\x0b\x63lient_name\x18\x03 \x01(\tH\x01R\nclientName\x88\x01\x01\x42\r\n\x0b_lease_nameB\x0e\n\x0c_client_name\",\n\x0b\x44ialRequest\x12\x1d\n\nlease_name\x18\x01 \x01(\tR\tleaseName\"Z\n\x0c\x44ialResponse\x12\'\n\x0frouter_endpoint\x18\x01 \x01(\tR\x0erouterEndpoint\x12!\n\x0crouter_token\x18\x02 \x01(\tR\x0brouterToken\"\xa1\x01\n\x12\x41uditStreamRequest\x12#\n\rexporter_uuid\x18\x01 \x01(\tR\x0c\x65xporterUuid\x12\x30\n\x14\x64river_instance_uuid\x18\x02 \x01(\tR\x12\x64riverInstanceUuid\x12\x1a\n\x08severity\x18\x03 \x01(\tR\x08severity\x12\x18\n\x07message\x18\x04 \x01(\tR\x07message\"x\n\x13ReportStatusRequest\x12\x36\n\x06status\x18\x01 \x01(\x0e\x32\x1e.jumpstarter.v1.ExporterStatusR\x06status\x12\x1d\n\x07message\x18\x02 
\x01(\tH\x00R\x07message\x88\x01\x01\x42\n\n\x08_message\"\x16\n\x14ReportStatusResponse\"\xb8\x02\n\x11GetReportResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x45\n\x06labels\x18\x02 \x03(\x0b\x32-.jumpstarter.v1.GetReportResponse.LabelsEntryR\x06labels\x12>\n\x07reports\x18\x03 \x03(\x0b\x32$.jumpstarter.v1.DriverInstanceReportR\x07reports\x12M\n\x15\x61lternative_endpoints\x18\x04 \x03(\x0b\x32\x18.jumpstarter.v1.EndpointR\x14\x61lternativeEndpoints\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xa5\x01\n\x08\x45ndpoint\x12\x1a\n\x08\x65ndpoint\x18\x01 \x01(\tR\x08\x65ndpoint\x12 \n\x0b\x63\x65rtificate\x18\x02 \x01(\tR\x0b\x63\x65rtificate\x12-\n\x12\x63lient_certificate\x18\x03 \x01(\tR\x11\x63lientCertificate\x12,\n\x12\x63lient_private_key\x18\x04 \x01(\tR\x10\x63lientPrivateKey\"k\n\x11\x44riverCallRequest\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x16\n\x06method\x18\x02 \x01(\tR\x06method\x12*\n\x04\x61rgs\x18\x03 \x03(\x0b\x32\x16.google.protobuf.ValueR\x04\x61rgs\"X\n\x12\x44riverCallResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12.\n\x06result\x18\x02 \x01(\x0b\x32\x16.google.protobuf.ValueR\x06result\"t\n\x1aStreamingDriverCallRequest\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x16\n\x06method\x18\x02 \x01(\tR\x06method\x12*\n\x04\x61rgs\x18\x03 \x03(\x0b\x32\x16.google.protobuf.ValueR\x04\x61rgs\"a\n\x1bStreamingDriverCallResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12.\n\x06result\x18\x02 \x01(\x0b\x32\x16.google.protobuf.ValueR\x06result\"\xa0\x01\n\x11LogStreamResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x1a\n\x08severity\x18\x02 \x01(\tR\x08severity\x12\x18\n\x07message\x18\x03 \x01(\tR\x07message\x12\x36\n\x06source\x18\x04 \x01(\x0e\x32\x19.jumpstarter.v1.LogSourceH\x00R\x06source\x88\x01\x01\x42\t\n\x07_source\"\x0e\n\x0cResetRequest\"\x0f\n\rResetResponse\"%\n\x0fGetLeaseRequest\x12\x12\n\x04name\x18\x01 
\x01(\tR\x04name\"\x93\x03\n\x10GetLeaseResponse\x12\x35\n\x08\x64uration\x18\x01 \x01(\x0b\x32\x19.google.protobuf.DurationR\x08\x64uration\x12\x39\n\x08selector\x18\x02 \x01(\x0b\x32\x1d.jumpstarter.v1.LabelSelectorR\x08selector\x12>\n\nbegin_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00R\tbeginTime\x88\x01\x01\x12:\n\x08\x65nd_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x01R\x07\x65ndTime\x88\x01\x01\x12(\n\rexporter_uuid\x18\x05 \x01(\tH\x02R\x0c\x65xporterUuid\x88\x01\x01\x12\x39\n\nconditions\x18\x06 \x03(\x0b\x32\x19.jumpstarter.v1.ConditionR\nconditionsB\r\n\x0b_begin_timeB\x0b\n\t_end_timeB\x10\n\x0e_exporter_uuid\"\x87\x01\n\x13RequestLeaseRequest\x12\x35\n\x08\x64uration\x18\x01 \x01(\x0b\x32\x19.google.protobuf.DurationR\x08\x64uration\x12\x39\n\x08selector\x18\x02 \x01(\x0b\x32\x1d.jumpstarter.v1.LabelSelectorR\x08selector\"*\n\x14RequestLeaseResponse\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\")\n\x13ReleaseLeaseRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"\x16\n\x14ReleaseLeaseResponse\"\x13\n\x11ListLeasesRequest\"*\n\x12ListLeasesResponse\x12\x14\n\x05names\x18\x01 \x03(\tR\x05names\"\x12\n\x10GetStatusRequest\"v\n\x11GetStatusResponse\x12\x36\n\x06status\x18\x01 \x01(\x0e\x32\x1e.jumpstarter.v1.ExporterStatusR\x06status\x12\x1d\n\x07message\x18\x02 \x01(\tH\x00R\x07message\x88\x01\x01\x42\n\n\x08_message\"\x13\n\x11\x45ndSessionRequest\"Y\n\x12\x45ndSessionResponse\x12\x18\n\x07success\x18\x01 \x01(\x08R\x07success\x12\x1d\n\x07message\x18\x02 \x01(\tH\x00R\x07message\x88\x01\x01\x42\n\n\x08_message2\x92\x07\n\x11\x43ontrollerService\x12M\n\x08Register\x12\x1f.jumpstarter.v1.RegisterRequest\x1a 
.jumpstarter.v1.RegisterResponse\x12S\n\nUnregister\x12!.jumpstarter.v1.UnregisterRequest\x1a\".jumpstarter.v1.UnregisterResponse\x12Y\n\x0cReportStatus\x12#.jumpstarter.v1.ReportStatusRequest\x1a$.jumpstarter.v1.ReportStatusResponse\x12I\n\x06Listen\x12\x1d.jumpstarter.v1.ListenRequest\x1a\x1e.jumpstarter.v1.ListenResponse0\x01\x12I\n\x06Status\x12\x1d.jumpstarter.v1.StatusRequest\x1a\x1e.jumpstarter.v1.StatusResponse0\x01\x12\x41\n\x04\x44ial\x12\x1b.jumpstarter.v1.DialRequest\x1a\x1c.jumpstarter.v1.DialResponse\x12K\n\x0b\x41uditStream\x12\".jumpstarter.v1.AuditStreamRequest\x1a\x16.google.protobuf.Empty(\x01\x12M\n\x08GetLease\x12\x1f.jumpstarter.v1.GetLeaseRequest\x1a .jumpstarter.v1.GetLeaseResponse\x12Y\n\x0cRequestLease\x12#.jumpstarter.v1.RequestLeaseRequest\x1a$.jumpstarter.v1.RequestLeaseResponse\x12Y\n\x0cReleaseLease\x12#.jumpstarter.v1.ReleaseLeaseRequest\x1a$.jumpstarter.v1.ReleaseLeaseResponse\x12S\n\nListLeases\x12!.jumpstarter.v1.ListLeasesRequest\x1a\".jumpstarter.v1.ListLeasesResponse2\xd7\x04\n\x0f\x45xporterService\x12\x46\n\tGetReport\x12\x16.google.protobuf.Empty\x1a!.jumpstarter.v1.GetReportResponse\x12S\n\nDriverCall\x12!.jumpstarter.v1.DriverCallRequest\x1a\".jumpstarter.v1.DriverCallResponse\x12p\n\x13StreamingDriverCall\x12*.jumpstarter.v1.StreamingDriverCallRequest\x1a+.jumpstarter.v1.StreamingDriverCallResponse0\x01\x12H\n\tLogStream\x12\x16.google.protobuf.Empty\x1a!.jumpstarter.v1.LogStreamResponse0\x01\x12\x44\n\x05Reset\x12\x1c.jumpstarter.v1.ResetRequest\x1a\x1d.jumpstarter.v1.ResetResponse\x12P\n\tGetStatus\x12 .jumpstarter.v1.GetStatusRequest\x1a!.jumpstarter.v1.GetStatusResponse\x12S\n\nEndSession\x12!.jumpstarter.v1.EndSessionRequest\x1a\".jumpstarter.v1.EndSessionResponseB\x7f\n\x12\x63om.jumpstarter.v1B\x10JumpstarterProtoP\x01\xa2\x02\x03JXX\xaa\x02\x0eJumpstarter.V1\xca\x02\x0eJumpstarter\\V1\xe2\x02\x1aJumpstarter\\V1\\GPBMetadata\xea\x02\x0fJumpstarter::V1b\x06proto3') _globals = globals() 
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -120,8 +120,12 @@ _globals['_GETSTATUSREQUEST']._serialized_end=3653 _globals['_GETSTATUSRESPONSE']._serialized_start=3655 _globals['_GETSTATUSRESPONSE']._serialized_end=3773 - _globals['_CONTROLLERSERVICE']._serialized_start=3776 - _globals['_CONTROLLERSERVICE']._serialized_end=4690 - _globals['_EXPORTERSERVICE']._serialized_start=4693 - _globals['_EXPORTERSERVICE']._serialized_end=5207 + _globals['_ENDSESSIONREQUEST']._serialized_start=3775 + _globals['_ENDSESSIONREQUEST']._serialized_end=3794 + _globals['_ENDSESSIONRESPONSE']._serialized_start=3796 + _globals['_ENDSESSIONRESPONSE']._serialized_end=3885 + _globals['_CONTROLLERSERVICE']._serialized_start=3888 + _globals['_CONTROLLERSERVICE']._serialized_end=4802 + _globals['_EXPORTERSERVICE']._serialized_start=4805 + _globals['_EXPORTERSERVICE']._serialized_end=5404 # @@protoc_insertion_point(module_scope) diff --git a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.pyi b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.pyi index 762c46c6..29bae1d5 100644 --- a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.pyi +++ b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.pyi @@ -715,3 +715,33 @@ class GetStatusResponse(google.protobuf.message.Message): def WhichOneof(self, oneof_group: typing.Literal["_message", b"_message"]) -> typing.Literal["message"] | None: ... Global___GetStatusResponse: typing_extensions.TypeAlias = GetStatusResponse + +@typing.final +class EndSessionRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... 
+ +Global___EndSessionRequest: typing_extensions.TypeAlias = EndSessionRequest + +@typing.final +class EndSessionResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SUCCESS_FIELD_NUMBER: builtins.int + MESSAGE_FIELD_NUMBER: builtins.int + success: builtins.bool + message: builtins.str + def __init__( + self, + *, + success: builtins.bool = ..., + message: builtins.str | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_message", b"_message", "message", b"message"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_message", b"_message", "message", b"message", "success", b"success"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["_message", b"_message"]) -> typing.Literal["message"] | None: ... + +Global___EndSessionResponse: typing_extensions.TypeAlias = EndSessionResponse diff --git a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.py b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.py index 641a345b..8360da0c 100644 --- a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.py +++ b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.py @@ -572,6 +572,11 @@ def __init__(self, channel): request_serializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.GetStatusRequest.SerializeToString, response_deserializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.GetStatusResponse.FromString, _registered_method=True) + self.EndSession = channel.unary_unary( + '/jumpstarter.v1.ExporterService/EndSession', + request_serializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.EndSessionRequest.SerializeToString, + response_deserializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.EndSessionResponse.FromString, + _registered_method=True) class ExporterServiceServicer(object): @@ -616,6 +621,15 @@ def GetStatus(self, 
request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def EndSession(self, request, context): + """End the current session, triggering the afterLease hook + The client should keep the connection open to receive hook logs via LogStream + Returns after the afterLease hook completes + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def add_ExporterServiceServicer_to_server(servicer, server): rpc_method_handlers = { @@ -649,6 +663,11 @@ def add_ExporterServiceServicer_to_server(servicer, server): request_deserializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.GetStatusRequest.FromString, response_serializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.GetStatusResponse.SerializeToString, ), + 'EndSession': grpc.unary_unary_rpc_method_handler( + servicer.EndSession, + request_deserializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.EndSessionRequest.FromString, + response_serializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.EndSessionResponse.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( 'jumpstarter.v1.ExporterService', rpc_method_handlers) @@ -823,3 +842,30 @@ def GetStatus(request, timeout, metadata, _registered_method=True) + + @staticmethod + def EndSession(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/jumpstarter.v1.ExporterService/EndSession', + jumpstarter_dot_v1_dot_jumpstarter__pb2.EndSessionRequest.SerializeToString, + jumpstarter_dot_v1_dot_jumpstarter__pb2.EndSessionResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) diff --git 
a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.pyi b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.pyi index 78c9ffbb..8352c5f4 100644 --- a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.pyi +++ b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.pyi @@ -600,7 +600,23 @@ _ExporterServiceGetStatusType = typing_extensions.TypeVar( ], ) -class ExporterServiceStub(typing.Generic[_ExporterServiceGetReportType, _ExporterServiceDriverCallType, _ExporterServiceStreamingDriverCallType, _ExporterServiceLogStreamType, _ExporterServiceResetType, _ExporterServiceGetStatusType]): +_ExporterServiceEndSessionType = typing_extensions.TypeVar( + '_ExporterServiceEndSessionType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.EndSessionRequest, + jumpstarter.v1.jumpstarter_pb2.EndSessionResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.EndSessionRequest, + jumpstarter.v1.jumpstarter_pb2.EndSessionResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.EndSessionRequest, + jumpstarter.v1.jumpstarter_pb2.EndSessionResponse, + ], +) + +class ExporterServiceStub(typing.Generic[_ExporterServiceGetReportType, _ExporterServiceDriverCallType, _ExporterServiceStreamingDriverCallType, _ExporterServiceLogStreamType, _ExporterServiceResetType, _ExporterServiceGetStatusType, _ExporterServiceEndSessionType]): """A service a exporter can share locally to be used without a server Channel/Call credentials are used to authenticate the client, and routing to the right exporter """ @@ -631,6 +647,10 @@ class ExporterServiceStub(typing.Generic[_ExporterServiceGetReportType, _Exporte jumpstarter.v1.jumpstarter_pb2.GetStatusRequest, jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, ], + grpc.UnaryUnaryMultiCallable[ + 
jumpstarter.v1.jumpstarter_pb2.EndSessionRequest, + jumpstarter.v1.jumpstarter_pb2.EndSessionResponse, + ], ], channel: grpc.Channel) -> None: ... @typing.overload @@ -659,6 +679,10 @@ class ExporterServiceStub(typing.Generic[_ExporterServiceGetReportType, _Exporte jumpstarter.v1.jumpstarter_pb2.GetStatusRequest, jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.EndSessionRequest, + jumpstarter.v1.jumpstarter_pb2.EndSessionResponse, + ], ], channel: grpc.aio.Channel) -> None: ... GetReport: _ExporterServiceGetReportType @@ -674,6 +698,12 @@ class ExporterServiceStub(typing.Generic[_ExporterServiceGetReportType, _Exporte GetStatus: _ExporterServiceGetStatusType + EndSession: _ExporterServiceEndSessionType + """End the current session, triggering the afterLease hook + The client should keep the connection open to receive hook logs via LogStream + Returns after the afterLease hook completes + """ + ExporterServiceAsyncStub: typing_extensions.TypeAlias = ExporterServiceStub[ grpc.aio.UnaryUnaryMultiCallable[ google.protobuf.empty_pb2.Empty, @@ -699,6 +729,10 @@ ExporterServiceAsyncStub: typing_extensions.TypeAlias = ExporterServiceStub[ jumpstarter.v1.jumpstarter_pb2.GetStatusRequest, jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.EndSessionRequest, + jumpstarter.v1.jumpstarter_pb2.EndSessionResponse, + ], ] class ExporterServiceServicer(metaclass=abc.ABCMeta): @@ -749,4 +783,15 @@ class ExporterServiceServicer(metaclass=abc.ABCMeta): context: _ServicerContext, ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.GetStatusResponse]]: ... 
+ @abc.abstractmethod + def EndSession( + self, + request: jumpstarter.v1.jumpstarter_pb2.EndSessionRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.EndSessionResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.EndSessionResponse]]: + """End the current session, triggering the afterLease hook + The client should keep the connection open to receive hook logs via LogStream + Returns after the afterLease hook completes + """ + def add_ExporterServiceServicer_to_server(servicer: ExporterServiceServicer, server: typing.Union[grpc.Server, grpc.aio.Server]) -> None: ... diff --git a/python/packages/jumpstarter/jumpstarter/client/core.py b/python/packages/jumpstarter/jumpstarter/client/core.py index 85a7267e..d264d0f6 100644 --- a/python/packages/jumpstarter/jumpstarter/client/core.py +++ b/python/packages/jumpstarter/jumpstarter/client/core.py @@ -84,6 +84,22 @@ def __post_init__(self): handler = RichHandler() self.logger.addHandler(handler) + async def get_status_async(self) -> ExporterStatus | None: + """Get the current exporter status. + + Returns: + The current ExporterStatus, or None if GetStatus is not implemented. + """ + try: + response = await self.stub.GetStatus(jumpstarter_pb2.GetStatusRequest()) + return ExporterStatus.from_proto(response.status) + except AioRpcError as e: + # If GetStatus is not implemented, return None for backward compatibility + if e.code() == StatusCode.UNIMPLEMENTED: + self.logger.debug("GetStatus not implemented") + return None + raise DriverError(f"Failed to get exporter status: {e.details()}") from e + async def check_exporter_status(self): """Check if the exporter is ready to accept driver calls. 
@@ -98,19 +114,33 @@ async def check_exporter_status(self): ExporterStatus.AFTER_LEASE_HOOK, } - try: - response = await self.stub.GetStatus(jumpstarter_pb2.GetStatusRequest()) - status = ExporterStatus.from_proto(response.status) + status = await self.get_status_async() + if status is None: + # GetStatus not implemented, assume ready for backward compatibility + return + + if status not in ALLOWED_STATUSES: + raise ExporterNotReady(f"Exporter status is {status}") - if status not in ALLOWED_STATUSES: - raise ExporterNotReady(f"Exporter status is {status}: {response.message}") + async def end_session_async(self) -> bool: + """End the current session and wait for afterLease hook to complete. + This signals the exporter to run the afterLease hook while keeping + the connection open, allowing the client to receive hook logs. + + Returns: + True if the session ended successfully, False if EndSession is not implemented. + """ + try: + response = await self.stub.EndSession(jumpstarter_pb2.EndSessionRequest()) + self.logger.debug("EndSession completed: success=%s, message=%s", response.success, response.message) + return response.success except AioRpcError as e: - # If GetStatus is not implemented, assume ready for backward compatibility + # If EndSession is not implemented, return False for backward compatibility if e.code() == StatusCode.UNIMPLEMENTED: - self.logger.debug("GetStatus not implemented, assuming exporter is ready") - return - raise DriverError(f"Failed to check exporter status: {e.details()}") from e + self.logger.debug("EndSession not implemented") + return False + raise DriverError(f"Failed to end session: {e.details()}") from e async def call_async(self, method, *args): """Make DriverCall by method name and arguments""" diff --git a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py index 15e8c37d..6484c3be 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py +++ 
b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py @@ -377,6 +377,37 @@ async def _handle_client_conn( except Exception as e: logger.info("failed to handle connection: {}".format(e)) + async def _handle_end_session(self, lease_context: LeaseContext) -> None: + """Handle EndSession requests from client. + + Waits for the end_session_requested event, runs the afterLease hook, + and signals after_lease_hook_done when complete. This allows clients + to receive afterLease hook logs before the connection is closed. + + Args: + lease_context: The LeaseContext for the current lease. + """ + # Wait for client to signal end of session + await lease_context.end_session_requested.wait() + logger.info("EndSession requested, running afterLease hook") + + try: + if self.hook_executor and lease_context.has_client(): + with CancelScope(shield=True): + await self.hook_executor.run_after_lease_hook( + lease_context, + self._report_status, + self.stop, + ) + logger.info("afterLease hook completed via EndSession") + else: + logger.debug("No afterLease hook configured or no client, skipping") + except Exception as e: + logger.error("Error running afterLease hook via EndSession: %s", e) + finally: + # Signal that the hook is done (whether it ran or not) + lease_context.after_lease_hook_done.set() + @asynccontextmanager async def session(self): """Create and manage an exporter Session context. 
@@ -441,6 +472,8 @@ async def handle_lease(self, lease_name: str, tg: TaskGroup, lease_scope: LeaseC # Populate the lease scope with session and socket path lease_scope.session = session lease_scope.socket_path = path + # Link session to lease context for EndSession RPC + session.lease_context = lease_scope # Wait for before-lease hook to complete before processing client connections logger.info("Waiting for before-lease hook to complete before accepting connections") @@ -451,6 +484,9 @@ async def handle_lease(self, lease_name: str, tg: TaskGroup, lease_scope: LeaseC # status from serve() rather than the default AVAILABLE session.update_status(lease_scope.current_status, lease_scope.status_message) + # Start task to handle EndSession requests (runs afterLease hook when client signals done) + tg.start_soon(self._handle_end_session, lease_scope) + # Process client connections # Type: request is jumpstarter_pb2.ListenResponse with router_endpoint and router_token fields async for request in listen_rx: @@ -491,7 +527,12 @@ async def serve(self): # noqa: C901 ) if lease_changed: # After-lease hook for the previous lease (lease name changed) - if self.hook_executor and self._lease_context.has_client(): + # Skip if already run via EndSession + if ( + self.hook_executor + and self._lease_context.has_client() + and not self._lease_context.after_lease_hook_done.is_set() + ): with CancelScope(shield=True): await self.hook_executor.run_after_lease_hook( self._lease_context, @@ -544,11 +585,13 @@ async def serve(self): # noqa: C901 logger.info("Currently not leased") # After-lease hook when transitioning from leased to unleased + # Skip if already run via EndSession if ( previous_leased and self.hook_executor and self._lease_context and self._lease_context.has_client() + and not self._lease_context.after_lease_hook_done.is_set() ): # Shield the after-lease hook from cancellation with CancelScope(shield=True): diff --git 
a/python/packages/jumpstarter/jumpstarter/exporter/hooks.py b/python/packages/jumpstarter/jumpstarter/exporter/hooks.py index d3ec0e36..b95f8c7b 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/hooks.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/hooks.py @@ -165,63 +165,67 @@ async def _execute_hook_process( cause: Exception | None = None timed_out = False - try: - # Execute the hook command using shell via anyio - # Pass the command as a string to use shell mode - async with await open_process( - command, - env=hook_env, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - ) as process: - output_lines: list[str] = [] - - async def read_output() -> None: - """Read stdout line by line.""" - assert process.stdout is not None - buffer = b"" - async for chunk in process.stdout: - buffer += chunk - while b"\n" in buffer: - line, buffer = buffer.split(b"\n", 1) - line_decoded = line.decode().rstrip() - output_lines.append(line_decoded) - logger.info("[hook output] %s", line_decoded) - # Handle any remaining data without newline - if buffer: - line_decoded = buffer.decode().rstrip() - if line_decoded: - output_lines.append(line_decoded) - logger.info("[hook output] %s", line_decoded) - - # Use move_on_after for timeout - with anyio.move_on_after(timeout) as cancel_scope: - await read_output() - await process.wait() - - if cancel_scope.cancelled_caught: - timed_out = True - error_msg = f"Hook timed out after {timeout} seconds" - logger.error(error_msg) - # Terminate the process - process.terminate() - # Give it a moment to terminate gracefully - with anyio.move_on_after(5): + # Route hook output logs to the client via the session's log stream + with logging_session.context_log_source(__name__, log_source): + try: + # Execute the hook command using shell via anyio + # Pass the command as a string to use shell mode + async with await open_process( + command, + env=hook_env, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + ) as process: + 
output_lines: list[str] = [] + # Create hook label for log output (e.g., "beforeLease" or "afterLease") + hook_label = "beforeLease" if hook_type == "before_lease" else "afterLease" + + async def read_output() -> None: + """Read stdout line by line.""" + assert process.stdout is not None + buffer = b"" + async for chunk in process.stdout: + buffer += chunk + while b"\n" in buffer: + line, buffer = buffer.split(b"\n", 1) + line_decoded = line.decode().rstrip() + output_lines.append(line_decoded) + logger.info("[%s hook] %s", hook_label, line_decoded) + # Handle any remaining data without newline + if buffer: + line_decoded = buffer.decode().rstrip() + if line_decoded: + output_lines.append(line_decoded) + logger.info("[%s hook] %s", hook_label, line_decoded) + + # Use move_on_after for timeout + with anyio.move_on_after(timeout) as cancel_scope: + await read_output() await process.wait() - # Force kill if still running - if process.returncode is None: - process.kill() - elif process.returncode == 0: - logger.info("Hook executed successfully") - return - else: - error_msg = f"Hook failed with exit code {process.returncode}" - - except Exception as e: - error_msg = f"Error executing hook: {e}" - cause = e - logger.error(error_msg, exc_info=True) + if cancel_scope.cancelled_caught: + timed_out = True + error_msg = f"Hook timed out after {timeout} seconds" + logger.error(error_msg) + # Terminate the process + process.terminate() + # Give it a moment to terminate gracefully + with anyio.move_on_after(5): + await process.wait() + # Force kill if still running + if process.returncode is None: + process.kill() + + elif process.returncode == 0: + logger.info("Hook executed successfully") + return + else: + error_msg = f"Hook failed with exit code {process.returncode}" + + except Exception as e: + error_msg = f"Error executing hook: {e}" + cause = e + logger.error(error_msg, exc_info=True) # Handle failure if one occurred if error_msg is not None: diff --git 
a/python/packages/jumpstarter/jumpstarter/exporter/lease_context.py b/python/packages/jumpstarter/jumpstarter/exporter/lease_context.py index 9e887839..d91dfda3 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/lease_context.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/lease_context.py @@ -28,6 +28,8 @@ class LeaseContext: session: The Session object managing the device and gRPC services (set in handle_lease) socket_path: Unix socket path where the session is serving (set in handle_lease) before_lease_hook: Event that signals when before-lease hook completes + end_session_requested: Event that signals when client requests end session (to run afterLease hook) + after_lease_hook_done: Event that signals when afterLease hook has completed client_name: Name of the client currently holding the lease (empty if unleased) current_status: Current exporter status (stored here for access before session is created) status_message: Message describing the current status @@ -35,6 +37,8 @@ class LeaseContext: lease_name: str before_lease_hook: Event + end_session_requested: Event = field(default_factory=Event) + after_lease_hook_done: Event = field(default_factory=Event) session: "Session | None" = None socket_path: str = "" client_name: str = field(default="") diff --git a/python/packages/jumpstarter/jumpstarter/exporter/session.py b/python/packages/jumpstarter/jumpstarter/exporter/session.py index 663c39e0..b10359ed 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/session.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/session.py @@ -25,6 +25,7 @@ if TYPE_CHECKING: from jumpstarter.driver import Driver + from jumpstarter.exporter.lease_context import LeaseContext logger = logging.getLogger(__name__) @@ -38,6 +39,7 @@ class Session( ): root_device: "Driver" mapping: dict[UUID, "Driver"] + lease_context: "LeaseContext | None" = field(init=False, default=None) _logging_queue: deque = field(init=False) _logging_handler: QueueHandler = 
field(init=False) @@ -176,3 +178,37 @@ async def GetStatus(self, request, context): status=self._current_status.to_proto(), message=self._status_message, ) + + async def EndSession(self, request, context): + """End the current session and trigger the afterLease hook. + + This is called by the client when it's done with the session but wants + to keep the connection open to receive logs from the afterLease hook. + The method signals the end_session_requested event and waits for the + afterLease hook to complete before returning. + + Returns: + EndSessionResponse with success status and optional message. + """ + logger.info("EndSession called by client") + + if self.lease_context is None: + logger.warning("EndSession called but no lease context available") + return jumpstarter_pb2.EndSessionResponse( + success=False, + message="No active lease context", + ) + + # Signal that the client wants to end the session + logger.debug("Setting end_session_requested event") + self.lease_context.end_session_requested.set() + + # Wait for the afterLease hook to complete + logger.debug("Waiting for after_lease_hook_done event") + await self.lease_context.after_lease_hook_done.wait() + logger.info("EndSession complete, afterLease hook finished") + + return jumpstarter_pb2.EndSessionResponse( + success=True, + message="Session ended and afterLease hook completed", + ) From 1fd2f1de46be63724f9bd90c9107dd5e9eca89b9 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Mon, 19 Jan 2026 18:07:38 -0500 Subject: [PATCH 29/30] Improve lease handling and eliminate race conditions causing leases to get blocked --- .../jumpstarter_cli_common/opt.py | 2 +- .../jumpstarter-cli/jumpstarter_cli/shell.py | 73 +++---- .../jumpstarter/v1/jumpstarter_pb2.py | 102 +++++----- .../jumpstarter/v1/jumpstarter_pb2.pyi | 11 +- .../jumpstarter/jumpstarter/client/core.py | 172 ++++++++++++++-- .../jumpstarter/jumpstarter/client/lease.py | 8 +- .../jumpstarter/exporter/exporter.py | 95 +++++++-- 
.../jumpstarter/jumpstarter/exporter/hooks.py | 185 +++++++++++++----- .../jumpstarter/exporter/lease_context.py | 18 ++ .../jumpstarter/exporter/logging.py | 2 +- .../jumpstarter/exporter/session.py | 5 +- 11 files changed, 500 insertions(+), 173 deletions(-) diff --git a/python/packages/jumpstarter-cli-common/jumpstarter_cli_common/opt.py b/python/packages/jumpstarter-cli-common/jumpstarter_cli_common/opt.py index 92184de2..7d34d335 100644 --- a/python/packages/jumpstarter-cli-common/jumpstarter_cli_common/opt.py +++ b/python/packages/jumpstarter-cli-common/jumpstarter_cli_common/opt.py @@ -16,7 +16,7 @@ def _opt_log_level_callback(ctx, param, value): # on a exporter run we don't want to use RichHandler for logs, just plain logs for the system journal basicConfig = partial(logging.basicConfig) else: - basicConfig = partial(logging.basicConfig, handlers=[RichHandler()]) + basicConfig = partial(logging.basicConfig, format="%(message)s [%(name)s]", handlers=[RichHandler(show_path=False)]) if value: basicConfig(level=value.upper()) diff --git a/python/packages/jumpstarter-cli/jumpstarter_cli/shell.py b/python/packages/jumpstarter-cli/jumpstarter_cli/shell.py index e83d4146..fd1ef677 100644 --- a/python/packages/jumpstarter-cli/jumpstarter_cli/shell.py +++ b/python/packages/jumpstarter-cli/jumpstarter_cli/shell.py @@ -93,10 +93,12 @@ async def _run_shell_with_lease_async(lease, exporter_logs, config, command, can """Run shell with lease context managers and wait for afterLease hook if logs enabled. When exporter_logs is enabled, this function will: - 1. Run the shell command - 2. After shell exits, call EndSession to trigger and wait for afterLease hook - 3. Logs stream to client during hook execution - 4. Release the lease after hook completes + 1. Connect and start log streaming + 2. Wait for beforeLease hook to complete (logs stream in real-time) + 3. Run the shell command + 4. After shell exits, call EndSession to trigger and wait for afterLease hook + 5. 
Logs stream to client during hook execution + 6. Release the lease after hook completes If Ctrl+C is pressed during EndSession, the wait is skipped but the lease is still released. """ @@ -106,39 +108,44 @@ async def _run_shell_with_lease_async(lease, exporter_logs, config, command, can async with lease.serve_unix_async() as path: async with lease.monitor_async(): - if exporter_logs: - # Use ExitStack for the client (required by client_from_path) - with ExitStack() as stack: - async with client_from_path( - path, lease.portal, stack, allow=lease.allow, unsafe=lease.unsafe - ) as client: - async with client.log_stream_async(): - # Run the shell command - exit_code = await anyio.to_thread.run_sync( - _run_shell_only, lease, config, command, path - ) - - # Shell has exited. Call EndSession to trigger afterLease hook - # while keeping log stream open to receive hook logs - if lease.name and not cancel_scope.cancel_called: - logger.info("Running afterLease hook (Ctrl+C to skip)...") - try: - # EndSession triggers the afterLease hook and waits for completion - # Logs are streamed to us during hook execution + # Use ExitStack for the client (required by client_from_path) + with ExitStack() as stack: + async with client_from_path( + path, lease.portal, stack, allow=lease.allow, unsafe=lease.unsafe + ) as client: + async with client.log_stream_async(show_all_logs=exporter_logs): + # Wait for beforeLease hook to complete while logs are streaming + # This allows hook output to be displayed in real-time + logger.debug("Waiting for beforeLease hook to complete...") + await client.wait_for_lease_ready() + logger.debug("wait_for_lease_ready returned, launching shell...") + + # Run the shell command + exit_code = await anyio.to_thread.run_sync( + _run_shell_only, lease, config, command, path + ) + + # Shell has exited. Call EndSession to trigger afterLease hook + # while keeping log stream open to receive hook logs. 
+ # EndSession waits for the hook to complete on the server side, + # so we don't need additional client-side polling. + if lease.name and not cancel_scope.cancel_called: + logger.info("Running afterLease hook (Ctrl+C to skip)...") + try: + # EndSession triggers the afterLease hook and waits for completion + # Use a timeout to prevent hanging if the connection is disrupted + with anyio.move_on_after(300) as timeout_scope: # 5 minute timeout success = await client.end_session_async() if success: - logger.debug("EndSession completed successfully") + logger.info("Lease released") else: - logger.debug("EndSession not implemented, skipping hook wait") - except Exception as e: - logger.warning("Error during EndSession: %s", e) + logger.debug("EndSession not implemented, skipping hook") + if timeout_scope.cancelled_caught: + logger.warning("Timeout waiting for afterLease hook to complete") + except Exception as e: + logger.warning("Error during EndSession: %s", e) - return exit_code - else: - exit_code = await anyio.to_thread.run_sync( - _run_shell_only, lease, config, command, path - ) - return exit_code + return exit_code async def _shell_with_signal_handling( diff --git a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.py b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.py index cf415caa..02b070cb 100644 --- a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.py +++ b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.py @@ -30,7 +30,7 @@ from . 
import common_pb2 as jumpstarter_dot_v1_dot_common__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n jumpstarter/v1/jumpstarter.proto\x12\x0ejumpstarter.v1\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1fjumpstarter/v1/kubernetes.proto\x1a\x1bjumpstarter/v1/common.proto\"\xd1\x01\n\x0fRegisterRequest\x12\x43\n\x06labels\x18\x01 \x03(\x0b\x32+.jumpstarter.v1.RegisterRequest.LabelsEntryR\x06labels\x12>\n\x07reports\x18\x02 \x03(\x0b\x32$.jumpstarter.v1.DriverInstanceReportR\x07reports\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xd2\x03\n\x14\x44riverInstanceReport\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12$\n\x0bparent_uuid\x18\x02 \x01(\tH\x00R\nparentUuid\x88\x01\x01\x12H\n\x06labels\x18\x03 \x03(\x0b\x32\x30.jumpstarter.v1.DriverInstanceReport.LabelsEntryR\x06labels\x12%\n\x0b\x64\x65scription\x18\x04 \x01(\tH\x01R\x0b\x64\x65scription\x88\x01\x01\x12m\n\x13methods_description\x18\x05 \x03(\x0b\x32<.jumpstarter.v1.DriverInstanceReport.MethodsDescriptionEntryR\x12methodsDescription\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1a\x45\n\x17MethodsDescriptionEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\x0e\n\x0c_parent_uuidB\x0e\n\x0c_description\"&\n\x10RegisterResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\"+\n\x11UnregisterRequest\x12\x16\n\x06reason\x18\x02 \x01(\tR\x06reason\"\x14\n\x12UnregisterResponse\".\n\rListenRequest\x12\x1d\n\nlease_name\x18\x01 \x01(\tR\tleaseName\"\\\n\x0eListenResponse\x12\'\n\x0frouter_endpoint\x18\x01 \x01(\tR\x0erouterEndpoint\x12!\n\x0crouter_token\x18\x02 \x01(\tR\x0brouterToken\"\x0f\n\rStatusRequest\"\x91\x01\n\x0eStatusResponse\x12\x16\n\x06leased\x18\x01 
\x01(\x08R\x06leased\x12\"\n\nlease_name\x18\x02 \x01(\tH\x00R\tleaseName\x88\x01\x01\x12$\n\x0b\x63lient_name\x18\x03 \x01(\tH\x01R\nclientName\x88\x01\x01\x42\r\n\x0b_lease_nameB\x0e\n\x0c_client_name\",\n\x0b\x44ialRequest\x12\x1d\n\nlease_name\x18\x01 \x01(\tR\tleaseName\"Z\n\x0c\x44ialResponse\x12\'\n\x0frouter_endpoint\x18\x01 \x01(\tR\x0erouterEndpoint\x12!\n\x0crouter_token\x18\x02 \x01(\tR\x0brouterToken\"\xa1\x01\n\x12\x41uditStreamRequest\x12#\n\rexporter_uuid\x18\x01 \x01(\tR\x0c\x65xporterUuid\x12\x30\n\x14\x64river_instance_uuid\x18\x02 \x01(\tR\x12\x64riverInstanceUuid\x12\x1a\n\x08severity\x18\x03 \x01(\tR\x08severity\x12\x18\n\x07message\x18\x04 \x01(\tR\x07message\"x\n\x13ReportStatusRequest\x12\x36\n\x06status\x18\x01 \x01(\x0e\x32\x1e.jumpstarter.v1.ExporterStatusR\x06status\x12\x1d\n\x07message\x18\x02 \x01(\tH\x00R\x07message\x88\x01\x01\x42\n\n\x08_message\"\x16\n\x14ReportStatusResponse\"\xb8\x02\n\x11GetReportResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x45\n\x06labels\x18\x02 \x03(\x0b\x32-.jumpstarter.v1.GetReportResponse.LabelsEntryR\x06labels\x12>\n\x07reports\x18\x03 \x03(\x0b\x32$.jumpstarter.v1.DriverInstanceReportR\x07reports\x12M\n\x15\x61lternative_endpoints\x18\x04 \x03(\x0b\x32\x18.jumpstarter.v1.EndpointR\x14\x61lternativeEndpoints\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xa5\x01\n\x08\x45ndpoint\x12\x1a\n\x08\x65ndpoint\x18\x01 \x01(\tR\x08\x65ndpoint\x12 \n\x0b\x63\x65rtificate\x18\x02 \x01(\tR\x0b\x63\x65rtificate\x12-\n\x12\x63lient_certificate\x18\x03 \x01(\tR\x11\x63lientCertificate\x12,\n\x12\x63lient_private_key\x18\x04 \x01(\tR\x10\x63lientPrivateKey\"k\n\x11\x44riverCallRequest\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x16\n\x06method\x18\x02 \x01(\tR\x06method\x12*\n\x04\x61rgs\x18\x03 \x03(\x0b\x32\x16.google.protobuf.ValueR\x04\x61rgs\"X\n\x12\x44riverCallResponse\x12\x12\n\x04uuid\x18\x01 
\x01(\tR\x04uuid\x12.\n\x06result\x18\x02 \x01(\x0b\x32\x16.google.protobuf.ValueR\x06result\"t\n\x1aStreamingDriverCallRequest\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x16\n\x06method\x18\x02 \x01(\tR\x06method\x12*\n\x04\x61rgs\x18\x03 \x03(\x0b\x32\x16.google.protobuf.ValueR\x04\x61rgs\"a\n\x1bStreamingDriverCallResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12.\n\x06result\x18\x02 \x01(\x0b\x32\x16.google.protobuf.ValueR\x06result\"\xa0\x01\n\x11LogStreamResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x1a\n\x08severity\x18\x02 \x01(\tR\x08severity\x12\x18\n\x07message\x18\x03 \x01(\tR\x07message\x12\x36\n\x06source\x18\x04 \x01(\x0e\x32\x19.jumpstarter.v1.LogSourceH\x00R\x06source\x88\x01\x01\x42\t\n\x07_source\"\x0e\n\x0cResetRequest\"\x0f\n\rResetResponse\"%\n\x0fGetLeaseRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"\x93\x03\n\x10GetLeaseResponse\x12\x35\n\x08\x64uration\x18\x01 \x01(\x0b\x32\x19.google.protobuf.DurationR\x08\x64uration\x12\x39\n\x08selector\x18\x02 \x01(\x0b\x32\x1d.jumpstarter.v1.LabelSelectorR\x08selector\x12>\n\nbegin_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00R\tbeginTime\x88\x01\x01\x12:\n\x08\x65nd_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x01R\x07\x65ndTime\x88\x01\x01\x12(\n\rexporter_uuid\x18\x05 \x01(\tH\x02R\x0c\x65xporterUuid\x88\x01\x01\x12\x39\n\nconditions\x18\x06 \x03(\x0b\x32\x19.jumpstarter.v1.ConditionR\nconditionsB\r\n\x0b_begin_timeB\x0b\n\t_end_timeB\x10\n\x0e_exporter_uuid\"\x87\x01\n\x13RequestLeaseRequest\x12\x35\n\x08\x64uration\x18\x01 \x01(\x0b\x32\x19.google.protobuf.DurationR\x08\x64uration\x12\x39\n\x08selector\x18\x02 \x01(\x0b\x32\x1d.jumpstarter.v1.LabelSelectorR\x08selector\"*\n\x14RequestLeaseResponse\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\")\n\x13ReleaseLeaseRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"\x16\n\x14ReleaseLeaseResponse\"\x13\n\x11ListLeasesRequest\"*\n\x12ListLeasesResponse\x12\x14\n\x05names\x18\x01 
\x03(\tR\x05names\"\x12\n\x10GetStatusRequest\"v\n\x11GetStatusResponse\x12\x36\n\x06status\x18\x01 \x01(\x0e\x32\x1e.jumpstarter.v1.ExporterStatusR\x06status\x12\x1d\n\x07message\x18\x02 \x01(\tH\x00R\x07message\x88\x01\x01\x42\n\n\x08_message\"\x13\n\x11\x45ndSessionRequest\"Y\n\x12\x45ndSessionResponse\x12\x18\n\x07success\x18\x01 \x01(\x08R\x07success\x12\x1d\n\x07message\x18\x02 \x01(\tH\x00R\x07message\x88\x01\x01\x42\n\n\x08_message2\x92\x07\n\x11\x43ontrollerService\x12M\n\x08Register\x12\x1f.jumpstarter.v1.RegisterRequest\x1a .jumpstarter.v1.RegisterResponse\x12S\n\nUnregister\x12!.jumpstarter.v1.UnregisterRequest\x1a\".jumpstarter.v1.UnregisterResponse\x12Y\n\x0cReportStatus\x12#.jumpstarter.v1.ReportStatusRequest\x1a$.jumpstarter.v1.ReportStatusResponse\x12I\n\x06Listen\x12\x1d.jumpstarter.v1.ListenRequest\x1a\x1e.jumpstarter.v1.ListenResponse0\x01\x12I\n\x06Status\x12\x1d.jumpstarter.v1.StatusRequest\x1a\x1e.jumpstarter.v1.StatusResponse0\x01\x12\x41\n\x04\x44ial\x12\x1b.jumpstarter.v1.DialRequest\x1a\x1c.jumpstarter.v1.DialResponse\x12K\n\x0b\x41uditStream\x12\".jumpstarter.v1.AuditStreamRequest\x1a\x16.google.protobuf.Empty(\x01\x12M\n\x08GetLease\x12\x1f.jumpstarter.v1.GetLeaseRequest\x1a 
.jumpstarter.v1.GetLeaseResponse\x12Y\n\x0cRequestLease\x12#.jumpstarter.v1.RequestLeaseRequest\x1a$.jumpstarter.v1.RequestLeaseResponse\x12Y\n\x0cReleaseLease\x12#.jumpstarter.v1.ReleaseLeaseRequest\x1a$.jumpstarter.v1.ReleaseLeaseResponse\x12S\n\nListLeases\x12!.jumpstarter.v1.ListLeasesRequest\x1a\".jumpstarter.v1.ListLeasesResponse2\xd7\x04\n\x0f\x45xporterService\x12\x46\n\tGetReport\x12\x16.google.protobuf.Empty\x1a!.jumpstarter.v1.GetReportResponse\x12S\n\nDriverCall\x12!.jumpstarter.v1.DriverCallRequest\x1a\".jumpstarter.v1.DriverCallResponse\x12p\n\x13StreamingDriverCall\x12*.jumpstarter.v1.StreamingDriverCallRequest\x1a+.jumpstarter.v1.StreamingDriverCallResponse0\x01\x12H\n\tLogStream\x12\x16.google.protobuf.Empty\x1a!.jumpstarter.v1.LogStreamResponse0\x01\x12\x44\n\x05Reset\x12\x1c.jumpstarter.v1.ResetRequest\x1a\x1d.jumpstarter.v1.ResetResponse\x12P\n\tGetStatus\x12 .jumpstarter.v1.GetStatusRequest\x1a!.jumpstarter.v1.GetStatusResponse\x12S\n\nEndSession\x12!.jumpstarter.v1.EndSessionRequest\x1a\".jumpstarter.v1.EndSessionResponseB\x7f\n\x12\x63om.jumpstarter.v1B\x10JumpstarterProtoP\x01\xa2\x02\x03JXX\xaa\x02\x0eJumpstarter.V1\xca\x02\x0eJumpstarter\\V1\xe2\x02\x1aJumpstarter\\V1\\GPBMetadata\xea\x02\x0fJumpstarter::V1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n jumpstarter/v1/jumpstarter.proto\x12\x0ejumpstarter.v1\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1fjumpstarter/v1/kubernetes.proto\x1a\x1bjumpstarter/v1/common.proto\"\xd1\x01\n\x0fRegisterRequest\x12\x43\n\x06labels\x18\x01 \x03(\x0b\x32+.jumpstarter.v1.RegisterRequest.LabelsEntryR\x06labels\x12>\n\x07reports\x18\x02 \x03(\x0b\x32$.jumpstarter.v1.DriverInstanceReportR\x07reports\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 
\x01(\tR\x05value:\x02\x38\x01\"\xd2\x03\n\x14\x44riverInstanceReport\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12$\n\x0bparent_uuid\x18\x02 \x01(\tH\x00R\nparentUuid\x88\x01\x01\x12H\n\x06labels\x18\x03 \x03(\x0b\x32\x30.jumpstarter.v1.DriverInstanceReport.LabelsEntryR\x06labels\x12%\n\x0b\x64\x65scription\x18\x04 \x01(\tH\x01R\x0b\x64\x65scription\x88\x01\x01\x12m\n\x13methods_description\x18\x05 \x03(\x0b\x32<.jumpstarter.v1.DriverInstanceReport.MethodsDescriptionEntryR\x12methodsDescription\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1a\x45\n\x17MethodsDescriptionEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\x0e\n\x0c_parent_uuidB\x0e\n\x0c_description\"&\n\x10RegisterResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\"+\n\x11UnregisterRequest\x12\x16\n\x06reason\x18\x02 \x01(\tR\x06reason\"\x14\n\x12UnregisterResponse\".\n\rListenRequest\x12\x1d\n\nlease_name\x18\x01 \x01(\tR\tleaseName\"\\\n\x0eListenResponse\x12\'\n\x0frouter_endpoint\x18\x01 \x01(\tR\x0erouterEndpoint\x12!\n\x0crouter_token\x18\x02 \x01(\tR\x0brouterToken\"\x0f\n\rStatusRequest\"\x91\x01\n\x0eStatusResponse\x12\x16\n\x06leased\x18\x01 \x01(\x08R\x06leased\x12\"\n\nlease_name\x18\x02 \x01(\tH\x00R\tleaseName\x88\x01\x01\x12$\n\x0b\x63lient_name\x18\x03 \x01(\tH\x01R\nclientName\x88\x01\x01\x42\r\n\x0b_lease_nameB\x0e\n\x0c_client_name\",\n\x0b\x44ialRequest\x12\x1d\n\nlease_name\x18\x01 \x01(\tR\tleaseName\"Z\n\x0c\x44ialResponse\x12\'\n\x0frouter_endpoint\x18\x01 \x01(\tR\x0erouterEndpoint\x12!\n\x0crouter_token\x18\x02 \x01(\tR\x0brouterToken\"\xa1\x01\n\x12\x41uditStreamRequest\x12#\n\rexporter_uuid\x18\x01 \x01(\tR\x0c\x65xporterUuid\x12\x30\n\x14\x64river_instance_uuid\x18\x02 \x01(\tR\x12\x64riverInstanceUuid\x12\x1a\n\x08severity\x18\x03 \x01(\tR\x08severity\x12\x18\n\x07message\x18\x04 
\x01(\tR\x07message\"\xb4\x01\n\x13ReportStatusRequest\x12\x36\n\x06status\x18\x01 \x01(\x0e\x32\x1e.jumpstarter.v1.ExporterStatusR\x06status\x12\x1d\n\x07message\x18\x02 \x01(\tH\x00R\x07message\x88\x01\x01\x12(\n\rrelease_lease\x18\x03 \x01(\x08H\x01R\x0creleaseLease\x88\x01\x01\x42\n\n\x08_messageB\x10\n\x0e_release_lease\"\x16\n\x14ReportStatusResponse\"\xb8\x02\n\x11GetReportResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x45\n\x06labels\x18\x02 \x03(\x0b\x32-.jumpstarter.v1.GetReportResponse.LabelsEntryR\x06labels\x12>\n\x07reports\x18\x03 \x03(\x0b\x32$.jumpstarter.v1.DriverInstanceReportR\x07reports\x12M\n\x15\x61lternative_endpoints\x18\x04 \x03(\x0b\x32\x18.jumpstarter.v1.EndpointR\x14\x61lternativeEndpoints\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xa5\x01\n\x08\x45ndpoint\x12\x1a\n\x08\x65ndpoint\x18\x01 \x01(\tR\x08\x65ndpoint\x12 \n\x0b\x63\x65rtificate\x18\x02 \x01(\tR\x0b\x63\x65rtificate\x12-\n\x12\x63lient_certificate\x18\x03 \x01(\tR\x11\x63lientCertificate\x12,\n\x12\x63lient_private_key\x18\x04 \x01(\tR\x10\x63lientPrivateKey\"k\n\x11\x44riverCallRequest\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x16\n\x06method\x18\x02 \x01(\tR\x06method\x12*\n\x04\x61rgs\x18\x03 \x03(\x0b\x32\x16.google.protobuf.ValueR\x04\x61rgs\"X\n\x12\x44riverCallResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12.\n\x06result\x18\x02 \x01(\x0b\x32\x16.google.protobuf.ValueR\x06result\"t\n\x1aStreamingDriverCallRequest\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x16\n\x06method\x18\x02 \x01(\tR\x06method\x12*\n\x04\x61rgs\x18\x03 \x03(\x0b\x32\x16.google.protobuf.ValueR\x04\x61rgs\"a\n\x1bStreamingDriverCallResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12.\n\x06result\x18\x02 \x01(\x0b\x32\x16.google.protobuf.ValueR\x06result\"\xa0\x01\n\x11LogStreamResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x1a\n\x08severity\x18\x02 
\x01(\tR\x08severity\x12\x18\n\x07message\x18\x03 \x01(\tR\x07message\x12\x36\n\x06source\x18\x04 \x01(\x0e\x32\x19.jumpstarter.v1.LogSourceH\x00R\x06source\x88\x01\x01\x42\t\n\x07_source\"\x0e\n\x0cResetRequest\"\x0f\n\rResetResponse\"%\n\x0fGetLeaseRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"\x93\x03\n\x10GetLeaseResponse\x12\x35\n\x08\x64uration\x18\x01 \x01(\x0b\x32\x19.google.protobuf.DurationR\x08\x64uration\x12\x39\n\x08selector\x18\x02 \x01(\x0b\x32\x1d.jumpstarter.v1.LabelSelectorR\x08selector\x12>\n\nbegin_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00R\tbeginTime\x88\x01\x01\x12:\n\x08\x65nd_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x01R\x07\x65ndTime\x88\x01\x01\x12(\n\rexporter_uuid\x18\x05 \x01(\tH\x02R\x0c\x65xporterUuid\x88\x01\x01\x12\x39\n\nconditions\x18\x06 \x03(\x0b\x32\x19.jumpstarter.v1.ConditionR\nconditionsB\r\n\x0b_begin_timeB\x0b\n\t_end_timeB\x10\n\x0e_exporter_uuid\"\x87\x01\n\x13RequestLeaseRequest\x12\x35\n\x08\x64uration\x18\x01 \x01(\x0b\x32\x19.google.protobuf.DurationR\x08\x64uration\x12\x39\n\x08selector\x18\x02 \x01(\x0b\x32\x1d.jumpstarter.v1.LabelSelectorR\x08selector\"*\n\x14RequestLeaseResponse\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\")\n\x13ReleaseLeaseRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"\x16\n\x14ReleaseLeaseResponse\"\x13\n\x11ListLeasesRequest\"*\n\x12ListLeasesResponse\x12\x14\n\x05names\x18\x01 \x03(\tR\x05names\"\x12\n\x10GetStatusRequest\"v\n\x11GetStatusResponse\x12\x36\n\x06status\x18\x01 \x01(\x0e\x32\x1e.jumpstarter.v1.ExporterStatusR\x06status\x12\x1d\n\x07message\x18\x02 \x01(\tH\x00R\x07message\x88\x01\x01\x42\n\n\x08_message\"\x13\n\x11\x45ndSessionRequest\"Y\n\x12\x45ndSessionResponse\x12\x18\n\x07success\x18\x01 \x01(\x08R\x07success\x12\x1d\n\x07message\x18\x02 \x01(\tH\x00R\x07message\x88\x01\x01\x42\n\n\x08_message2\x92\x07\n\x11\x43ontrollerService\x12M\n\x08Register\x12\x1f.jumpstarter.v1.RegisterRequest\x1a 
.jumpstarter.v1.RegisterResponse\x12S\n\nUnregister\x12!.jumpstarter.v1.UnregisterRequest\x1a\".jumpstarter.v1.UnregisterResponse\x12Y\n\x0cReportStatus\x12#.jumpstarter.v1.ReportStatusRequest\x1a$.jumpstarter.v1.ReportStatusResponse\x12I\n\x06Listen\x12\x1d.jumpstarter.v1.ListenRequest\x1a\x1e.jumpstarter.v1.ListenResponse0\x01\x12I\n\x06Status\x12\x1d.jumpstarter.v1.StatusRequest\x1a\x1e.jumpstarter.v1.StatusResponse0\x01\x12\x41\n\x04\x44ial\x12\x1b.jumpstarter.v1.DialRequest\x1a\x1c.jumpstarter.v1.DialResponse\x12K\n\x0b\x41uditStream\x12\".jumpstarter.v1.AuditStreamRequest\x1a\x16.google.protobuf.Empty(\x01\x12M\n\x08GetLease\x12\x1f.jumpstarter.v1.GetLeaseRequest\x1a .jumpstarter.v1.GetLeaseResponse\x12Y\n\x0cRequestLease\x12#.jumpstarter.v1.RequestLeaseRequest\x1a$.jumpstarter.v1.RequestLeaseResponse\x12Y\n\x0cReleaseLease\x12#.jumpstarter.v1.ReleaseLeaseRequest\x1a$.jumpstarter.v1.ReleaseLeaseResponse\x12S\n\nListLeases\x12!.jumpstarter.v1.ListLeasesRequest\x1a\".jumpstarter.v1.ListLeasesResponse2\xd7\x04\n\x0f\x45xporterService\x12\x46\n\tGetReport\x12\x16.google.protobuf.Empty\x1a!.jumpstarter.v1.GetReportResponse\x12S\n\nDriverCall\x12!.jumpstarter.v1.DriverCallRequest\x1a\".jumpstarter.v1.DriverCallResponse\x12p\n\x13StreamingDriverCall\x12*.jumpstarter.v1.StreamingDriverCallRequest\x1a+.jumpstarter.v1.StreamingDriverCallResponse0\x01\x12H\n\tLogStream\x12\x16.google.protobuf.Empty\x1a!.jumpstarter.v1.LogStreamResponse0\x01\x12\x44\n\x05Reset\x12\x1c.jumpstarter.v1.ResetRequest\x1a\x1d.jumpstarter.v1.ResetResponse\x12P\n\tGetStatus\x12 .jumpstarter.v1.GetStatusRequest\x1a!.jumpstarter.v1.GetStatusResponse\x12S\n\nEndSession\x12!.jumpstarter.v1.EndSessionRequest\x1a\".jumpstarter.v1.EndSessionResponseB\x7f\n\x12\x63om.jumpstarter.v1B\x10JumpstarterProtoP\x01\xa2\x02\x03JXX\xaa\x02\x0eJumpstarter.V1\xca\x02\x0eJumpstarter\\V1\xe2\x02\x1aJumpstarter\\V1\\GPBMetadata\xea\x02\x0fJumpstarter::V1b\x06proto3') _globals = globals() 
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -76,56 +76,56 @@ _globals['_DIALRESPONSE']._serialized_end=1469 _globals['_AUDITSTREAMREQUEST']._serialized_start=1472 _globals['_AUDITSTREAMREQUEST']._serialized_end=1633 - _globals['_REPORTSTATUSREQUEST']._serialized_start=1635 - _globals['_REPORTSTATUSREQUEST']._serialized_end=1755 - _globals['_REPORTSTATUSRESPONSE']._serialized_start=1757 - _globals['_REPORTSTATUSRESPONSE']._serialized_end=1779 - _globals['_GETREPORTRESPONSE']._serialized_start=1782 - _globals['_GETREPORTRESPONSE']._serialized_end=2094 + _globals['_REPORTSTATUSREQUEST']._serialized_start=1636 + _globals['_REPORTSTATUSREQUEST']._serialized_end=1816 + _globals['_REPORTSTATUSRESPONSE']._serialized_start=1818 + _globals['_REPORTSTATUSRESPONSE']._serialized_end=1840 + _globals['_GETREPORTRESPONSE']._serialized_start=1843 + _globals['_GETREPORTRESPONSE']._serialized_end=2155 _globals['_GETREPORTRESPONSE_LABELSENTRY']._serialized_start=391 _globals['_GETREPORTRESPONSE_LABELSENTRY']._serialized_end=448 - _globals['_ENDPOINT']._serialized_start=2097 - _globals['_ENDPOINT']._serialized_end=2262 - _globals['_DRIVERCALLREQUEST']._serialized_start=2264 - _globals['_DRIVERCALLREQUEST']._serialized_end=2371 - _globals['_DRIVERCALLRESPONSE']._serialized_start=2373 - _globals['_DRIVERCALLRESPONSE']._serialized_end=2461 - _globals['_STREAMINGDRIVERCALLREQUEST']._serialized_start=2463 - _globals['_STREAMINGDRIVERCALLREQUEST']._serialized_end=2579 - _globals['_STREAMINGDRIVERCALLRESPONSE']._serialized_start=2581 - _globals['_STREAMINGDRIVERCALLRESPONSE']._serialized_end=2678 - _globals['_LOGSTREAMRESPONSE']._serialized_start=2681 - _globals['_LOGSTREAMRESPONSE']._serialized_end=2841 - _globals['_RESETREQUEST']._serialized_start=2843 - _globals['_RESETREQUEST']._serialized_end=2857 - _globals['_RESETRESPONSE']._serialized_start=2859 - _globals['_RESETRESPONSE']._serialized_end=2874 - _globals['_GETLEASEREQUEST']._serialized_start=2876 - 
_globals['_GETLEASEREQUEST']._serialized_end=2913 - _globals['_GETLEASERESPONSE']._serialized_start=2916 - _globals['_GETLEASERESPONSE']._serialized_end=3319 - _globals['_REQUESTLEASEREQUEST']._serialized_start=3322 - _globals['_REQUESTLEASEREQUEST']._serialized_end=3457 - _globals['_REQUESTLEASERESPONSE']._serialized_start=3459 - _globals['_REQUESTLEASERESPONSE']._serialized_end=3501 - _globals['_RELEASELEASEREQUEST']._serialized_start=3503 - _globals['_RELEASELEASEREQUEST']._serialized_end=3544 - _globals['_RELEASELEASERESPONSE']._serialized_start=3546 - _globals['_RELEASELEASERESPONSE']._serialized_end=3568 - _globals['_LISTLEASESREQUEST']._serialized_start=3570 - _globals['_LISTLEASESREQUEST']._serialized_end=3589 - _globals['_LISTLEASESRESPONSE']._serialized_start=3591 - _globals['_LISTLEASESRESPONSE']._serialized_end=3633 - _globals['_GETSTATUSREQUEST']._serialized_start=3635 - _globals['_GETSTATUSREQUEST']._serialized_end=3653 - _globals['_GETSTATUSRESPONSE']._serialized_start=3655 - _globals['_GETSTATUSRESPONSE']._serialized_end=3773 - _globals['_ENDSESSIONREQUEST']._serialized_start=3775 - _globals['_ENDSESSIONREQUEST']._serialized_end=3794 - _globals['_ENDSESSIONRESPONSE']._serialized_start=3796 - _globals['_ENDSESSIONRESPONSE']._serialized_end=3885 - _globals['_CONTROLLERSERVICE']._serialized_start=3888 - _globals['_CONTROLLERSERVICE']._serialized_end=4802 - _globals['_EXPORTERSERVICE']._serialized_start=4805 - _globals['_EXPORTERSERVICE']._serialized_end=5404 + _globals['_ENDPOINT']._serialized_start=2158 + _globals['_ENDPOINT']._serialized_end=2323 + _globals['_DRIVERCALLREQUEST']._serialized_start=2325 + _globals['_DRIVERCALLREQUEST']._serialized_end=2432 + _globals['_DRIVERCALLRESPONSE']._serialized_start=2434 + _globals['_DRIVERCALLRESPONSE']._serialized_end=2522 + _globals['_STREAMINGDRIVERCALLREQUEST']._serialized_start=2524 + _globals['_STREAMINGDRIVERCALLREQUEST']._serialized_end=2640 + 
_globals['_STREAMINGDRIVERCALLRESPONSE']._serialized_start=2642 + _globals['_STREAMINGDRIVERCALLRESPONSE']._serialized_end=2739 + _globals['_LOGSTREAMRESPONSE']._serialized_start=2742 + _globals['_LOGSTREAMRESPONSE']._serialized_end=2902 + _globals['_RESETREQUEST']._serialized_start=2904 + _globals['_RESETREQUEST']._serialized_end=2918 + _globals['_RESETRESPONSE']._serialized_start=2920 + _globals['_RESETRESPONSE']._serialized_end=2935 + _globals['_GETLEASEREQUEST']._serialized_start=2937 + _globals['_GETLEASEREQUEST']._serialized_end=2974 + _globals['_GETLEASERESPONSE']._serialized_start=2977 + _globals['_GETLEASERESPONSE']._serialized_end=3380 + _globals['_REQUESTLEASEREQUEST']._serialized_start=3383 + _globals['_REQUESTLEASEREQUEST']._serialized_end=3518 + _globals['_REQUESTLEASERESPONSE']._serialized_start=3520 + _globals['_REQUESTLEASERESPONSE']._serialized_end=3562 + _globals['_RELEASELEASEREQUEST']._serialized_start=3564 + _globals['_RELEASELEASEREQUEST']._serialized_end=3605 + _globals['_RELEASELEASERESPONSE']._serialized_start=3607 + _globals['_RELEASELEASERESPONSE']._serialized_end=3629 + _globals['_LISTLEASESREQUEST']._serialized_start=3631 + _globals['_LISTLEASESREQUEST']._serialized_end=3650 + _globals['_LISTLEASESRESPONSE']._serialized_start=3652 + _globals['_LISTLEASESRESPONSE']._serialized_end=3694 + _globals['_GETSTATUSREQUEST']._serialized_start=3696 + _globals['_GETSTATUSREQUEST']._serialized_end=3714 + _globals['_GETSTATUSRESPONSE']._serialized_start=3716 + _globals['_GETSTATUSRESPONSE']._serialized_end=3834 + _globals['_ENDSESSIONREQUEST']._serialized_start=3836 + _globals['_ENDSESSIONREQUEST']._serialized_end=3855 + _globals['_ENDSESSIONRESPONSE']._serialized_start=3857 + _globals['_ENDSESSIONRESPONSE']._serialized_end=3946 + _globals['_CONTROLLERSERVICE']._serialized_start=3949 + _globals['_CONTROLLERSERVICE']._serialized_end=4863 + _globals['_EXPORTERSERVICE']._serialized_start=4866 + _globals['_EXPORTERSERVICE']._serialized_end=5465 # 
@@protoc_insertion_point(module_scope) diff --git a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.pyi b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.pyi index 29bae1d5..9db0b676 100644 --- a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.pyi +++ b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.pyi @@ -314,18 +314,25 @@ class ReportStatusRequest(google.protobuf.message.Message): STATUS_FIELD_NUMBER: builtins.int MESSAGE_FIELD_NUMBER: builtins.int + RELEASE_LEASE_FIELD_NUMBER: builtins.int status: jumpstarter.v1.common_pb2.ExporterStatus.ValueType message: builtins.str """Optional human-readable status message""" + release_lease: builtins.bool + """When true, controller should release the active lease""" def __init__( self, *, status: jumpstarter.v1.common_pb2.ExporterStatus.ValueType = ..., message: builtins.str | None = ..., + release_lease: builtins.bool | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["_message", b"_message", "message", b"message"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["_message", b"_message", "message", b"message", "status", b"status"]) -> None: ... + def HasField(self, field_name: typing.Literal["_message", b"_message", "_release_lease", b"_release_lease", "message", b"message", "release_lease", b"release_lease"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_message", b"_message", "_release_lease", b"_release_lease", "message", b"message", "release_lease", b"release_lease", "status", b"status"]) -> None: ... + @typing.overload def WhichOneof(self, oneof_group: typing.Literal["_message", b"_message"]) -> typing.Literal["message"] | None: ... 
+ @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_release_lease", b"_release_lease"]) -> typing.Literal["release_lease"] | None: ... Global___ReportStatusRequest: typing_extensions.TypeAlias = ReportStatusRequest diff --git a/python/packages/jumpstarter/jumpstarter/client/core.py b/python/packages/jumpstarter/jumpstarter/client/core.py index d264d0f6..b360ba9f 100644 --- a/python/packages/jumpstarter/jumpstarter/client/core.py +++ b/python/packages/jumpstarter/jumpstarter/client/core.py @@ -81,7 +81,7 @@ def __post_init__(self): # add default handler if not self.logger.handlers: - handler = RichHandler() + handler = RichHandler(show_path=False) self.logger.addHandler(handler) async def get_status_async(self) -> ExporterStatus | None: @@ -122,14 +122,73 @@ async def check_exporter_status(self): if status not in ALLOWED_STATUSES: raise ExporterNotReady(f"Exporter status is {status}") + async def wait_for_lease_ready(self, timeout: float = 300.0) -> None: + """Wait for exporter to report LEASE_READY status. + + This polls GetStatus until the beforeLease hook completes. + Should be called after log streaming is started so hook output + can be displayed in real-time. 
+ + Args: + timeout: Maximum time to wait in seconds (default: 5 minutes) + """ + import anyio + + poll_interval = 0.5 # seconds + elapsed = 0.0 + + self.logger.info("Waiting for exporter to be ready...") + while elapsed < timeout: + try: + status = await self.get_status_async() + self.logger.debug("GetStatus returned: %s", status) + except Exception as e: + # Connection error - keep trying + self.logger.debug("Error getting status, will retry: %s", e) + await anyio.sleep(poll_interval) + elapsed += poll_interval + continue + + if status is None: + # GetStatus not implemented - assume ready for backward compatibility + self.logger.debug("GetStatus not implemented, assuming ready") + return + + if status == ExporterStatus.LEASE_READY: + self.logger.info("Exporter ready, starting shell...") + return + elif status == ExporterStatus.BEFORE_LEASE_HOOK: + # Hook is running - this is expected, keep waiting + self.logger.debug("beforeLease hook is running...") + elif status == ExporterStatus.BEFORE_LEASE_HOOK_FAILED: + # Hook failed - log but continue (exporter may still be usable) + self.logger.warning("beforeLease hook failed") + return + elif status == ExporterStatus.AVAILABLE: + # Exporter is available but not yet leased - keep waiting + # This can happen if client connects before exporter receives lease assignment + self.logger.info("Exporter status: AVAILABLE (waiting for lease assignment)") + else: + # Other status - continue waiting + self.logger.info("Exporter status: %s (waiting...)", status) + + await anyio.sleep(poll_interval) + elapsed += poll_interval + + self.logger.warning("Timeout waiting for beforeLease hook to complete") + async def end_session_async(self) -> bool: - """End the current session and wait for afterLease hook to complete. + """End the current session and trigger the afterLease hook. - This signals the exporter to run the afterLease hook while keeping - the connection open, allowing the client to receive hook logs. 
+ This signals the exporter to run the afterLease hook. The exporter will + release the lease after the hook completes, which may cause the connection + to be disrupted. Connection errors after EndSession is called are treated + as successful completion (the hook ran and the lease was released). Returns: - True if the session ended successfully, False if EndSession is not implemented. + True if the session end was triggered successfully or the connection + was disrupted (indicating the lease was released), False if EndSession + is not implemented. """ try: response = await self.stub.EndSession(jumpstarter_pb2.EndSessionRequest()) @@ -140,8 +199,66 @@ async def end_session_async(self) -> bool: if e.code() == StatusCode.UNIMPLEMENTED: self.logger.debug("EndSession not implemented") return False + # Connection errors (UNAVAILABLE, CANCELLED, UNKNOWN with "Stream removed") + # indicate the exporter has released the lease and restarted + if e.code() in (StatusCode.UNAVAILABLE, StatusCode.CANCELLED): + self.logger.debug("Connection disrupted during EndSession (lease released): %s", e.code()) + return True + if e.code() == StatusCode.UNKNOWN and "Stream removed" in str(e.details()): + self.logger.debug("Stream removed during EndSession (lease released)") + return True raise DriverError(f"Failed to end session: {e.details()}") from e + async def wait_for_hook_status(self, target_status: "ExporterStatus", timeout: float = 60.0) -> bool: + """Wait for exporter to reach a target status. + + Used after end_session_async() to wait for afterLease hook completion + while keeping the log stream open to receive hook logs. 
+ + Args: + target_status: The status to wait for (typically AVAILABLE) + timeout: Maximum time to wait in seconds (default: 60 seconds) + + Returns: + True if target status was reached, False if timed out + """ + import anyio + + poll_interval = 0.5 # seconds + elapsed = 0.0 + + while elapsed < timeout: + try: + status = await self.get_status_async() + + if status is None: + # GetStatus not implemented - assume ready for backward compatibility + self.logger.debug("GetStatus not implemented, assuming hook complete") + return True + + if status == target_status: + self.logger.debug("Exporter reached target status: %s", status) + return True + + # Hook failed states also indicate completion + if status == ExporterStatus.AFTER_LEASE_HOOK_FAILED: + self.logger.warning("afterLease hook failed") + return True + + # Still running hook - keep waiting + self.logger.debug("Waiting for hook completion, current status: %s", status) + + except AioRpcError as e: + # Connection error - the hook may still be running but we can't confirm + self.logger.debug("Connection error while waiting for hook: %s", e.code()) + return False + + await anyio.sleep(poll_interval) + elapsed += poll_interval + + self.logger.warning("Timeout waiting for hook to complete (target: %s)", target_status) + return False + async def call_async(self, method, *args): """Make DriverCall by method name and arguments""" @@ -230,14 +347,47 @@ async def resource_async( async with forward_stream(ProgressStream(stream=stream), rstream): yield metadata.resource.model_dump(mode="json") - def __log(self, level: int, msg: str): - self.logger.log(level, msg) - @asynccontextmanager - async def log_stream_async(self): + async def log_stream_async(self, show_all_logs: bool = True): async def log_stream(): - async for response in self.stub.LogStream(empty_pb2.Empty()): - self.__log(logging.getLevelName(response.severity), response.message) + from jumpstarter.common import LogSource + + try: + async for response in 
self.stub.LogStream(empty_pb2.Empty()): + # Determine log source + if response.HasField("source"): + source = LogSource(response.source) + is_hook = source in (LogSource.BEFORE_LEASE_HOOK, LogSource.AFTER_LEASE_HOOK) + else: + source = LogSource.SYSTEM + is_hook = False + + # Filter: always show hooks, only show system logs if enabled + if is_hook or show_all_logs: + # Get severity level + severity = response.severity if response.severity else "INFO" + log_level = getattr(logging, severity, logging.INFO) + + # Route to appropriate logger based on source + if source == LogSource.BEFORE_LEASE_HOOK: + logger_name = "exporter:beforeLease" + elif source == LogSource.AFTER_LEASE_HOOK: + logger_name = "exporter:afterLease" + elif source == LogSource.DRIVER: + logger_name = "exporter:driver" + else: # SYSTEM + logger_name = "exporter:system" + + # Log through logger for RichHandler formatting + source_logger = logging.getLogger(logger_name) + source_logger.log(log_level, response.message) + except AioRpcError as e: + # Connection disrupted - exit gracefully without raising + # This can happen when the session ends or network issues occur + self.logger.debug("Log stream ended: %s", e.code()) + except Exception as e: + # Other errors - log and exit gracefully + self.logger.debug("Log stream error: %s", e) async with create_task_group() as tg: tg.start_soon(log_stream) diff --git a/python/packages/jumpstarter/jumpstarter/client/lease.py b/python/packages/jumpstarter/jumpstarter/client/lease.py index bc772419..9023a13e 100644 --- a/python/packages/jumpstarter/jumpstarter/client/lease.py +++ b/python/packages/jumpstarter/jumpstarter/client/lease.py @@ -244,16 +244,22 @@ async def serve_unix_async(self): async with TemporaryUnixListener(self.handle_async) as path: logger.debug("Serving Unix socket at %s", path) await self._wait_for_ready_connection(path) - # TODO: talk to the exporter to make sure it's ready.... 
(once we have the hooks) yield path async def _wait_for_ready_connection(self, path: str): + """Wait for the basic gRPC connection to be established. + + This only waits for the connection to be available. It does NOT wait + for beforeLease hooks to complete - that should be done after log + streaming is started so hook output can be displayed in real-time. + """ retries_left = 5 logger.info("Waiting for ready connection at %s", path) while True: try: with ExitStack() as stack: async with client_from_path(path, self.portal, stack, allow=self.allow, unsafe=self.unsafe) as _: + # Connection established break except AioRpcError as e: if retries_left > 1: diff --git a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py index 6484c3be..61f2e969 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py @@ -314,6 +314,32 @@ async def _report_status(self, status: ExporterStatus, message: str = ""): except Exception as e: logger.error(f"Failed to update status: {e}") + async def _request_lease_release(self): + """Request the controller to release the current lease. + + Called after the afterLease hook completes to ensure the lease is + released even if the client disconnects unexpectedly. This moves + the lease release responsibility from the client to the exporter. 
+ """ + if not self._lease_context or not self._lease_context.lease_name: + logger.debug("No active lease to release") + return + + try: + controller = await self._get_controller_stub() + await controller.ReportStatus( + jumpstarter_pb2.ReportStatusRequest( + status=ExporterStatus.AVAILABLE.to_proto(), + message="Lease released after afterLease hook", + release_lease=True, + ) + ) + logger.info("Requested controller to release lease %s", self._lease_context.lease_name) + except Exception as e: + logger.error("Failed to request lease release: %s", e) + # Fall through - the client can still release the lease as a fallback, + # or the lease will eventually expire + async def _unregister_with_controller(self): """Safely unregister from controller with timeout and error handling.""" if not (self._registered and self._unregister): @@ -392,12 +418,22 @@ async def _handle_end_session(self, lease_context: LeaseContext) -> None: logger.info("EndSession requested, running afterLease hook") try: + # Check if hook already started (via lease state transition) + if lease_context.after_lease_hook_started.is_set(): + logger.debug("afterLease hook already started via lease state transition, waiting for completion") + await lease_context.after_lease_hook_done.wait() + return + + # Mark hook as started to prevent duplicate execution + lease_context.after_lease_hook_started.set() + if self.hook_executor and lease_context.has_client(): with CancelScope(shield=True): await self.hook_executor.run_after_lease_hook( lease_context, self._report_status, self.stop, + self._request_lease_release, ) logger.info("afterLease hook completed via EndSession") else: @@ -475,30 +511,39 @@ async def handle_lease(self, lease_name: str, tg: TaskGroup, lease_scope: LeaseC # Link session to lease context for EndSession RPC session.lease_context = lease_scope - # Wait for before-lease hook to complete before processing client connections - logger.info("Waiting for before-lease hook to complete before 
accepting connections") - await lease_scope.before_lease_hook.wait() - logger.info("Before-lease hook completed, now accepting connections") + # Accept connections immediately - driver calls will be gated internally + # until the beforeLease hook completes. This allows LogStream to work + # during hook execution for real-time log streaming. + logger.info("Accepting connections (driver calls gated until beforeLease hook completes)") - # Sync status to session AFTER hook completes - this ensures we have LEASE_READY - # status from serve() rather than the default AVAILABLE + # Sync status to session - this is updated by the hook runner session.update_status(lease_scope.current_status, lease_scope.status_message) # Start task to handle EndSession requests (runs afterLease hook when client signals done) tg.start_soon(self._handle_end_session, lease_scope) - # Process client connections + # Process client connections immediately # Type: request is jumpstarter_pb2.ListenResponse with router_endpoint and router_token fields - async for request in listen_rx: - logger.info("Handling new connection request on lease %s", lease_name) - tg.start_soon( - self._handle_client_conn, - lease_scope.socket_path, - request.router_endpoint, - request.router_token, - self.tls, - self.grpc_options, - ) + try: + async for request in listen_rx: + logger.info("Handling new connection request on lease %s", lease_name) + tg.start_soon( + self._handle_client_conn, + lease_scope.socket_path, + request.router_endpoint, + request.router_token, + self.tls, + self.grpc_options, + ) + finally: + # Wait for afterLease hook to complete before closing the session + # This ensures the socket is still available for driver calls within the hook + # Shield this wait from cancellation so the hook can complete even during shutdown + with CancelScope(shield=True): + if lease_scope.end_session_requested.is_set(): + logger.debug("Waiting for afterLease hook to complete before closing session") + await 
lease_scope.after_lease_hook_done.wait() + logger.debug("afterLease hook completed, closing session") async def serve(self): # noqa: C901 """ @@ -527,17 +572,21 @@ async def serve(self): # noqa: C901 ) if lease_changed: # After-lease hook for the previous lease (lease name changed) - # Skip if already run via EndSession + # Skip if already started via EndSession if ( self.hook_executor and self._lease_context.has_client() - and not self._lease_context.after_lease_hook_done.is_set() + and not self._lease_context.after_lease_hook_started.is_set() ): + # Mark hook as started to prevent duplicate execution + self._lease_context.after_lease_hook_started.set() + with CancelScope(shield=True): await self.hook_executor.run_after_lease_hook( self._lease_context, self._report_status, self.stop, + self._request_lease_release, ) logger.info("Lease status changed, killing existing connections") @@ -585,20 +634,24 @@ async def serve(self): # noqa: C901 logger.info("Currently not leased") # After-lease hook when transitioning from leased to unleased - # Skip if already run via EndSession + # Skip if already started via EndSession if ( previous_leased and self.hook_executor and self._lease_context and self._lease_context.has_client() - and not self._lease_context.after_lease_hook_done.is_set() + and not self._lease_context.after_lease_hook_started.is_set() ): + # Mark hook as started to prevent duplicate execution + self._lease_context.after_lease_hook_started.set() + # Shield the after-lease hook from cancellation with CancelScope(shield=True): await self.hook_executor.run_after_lease_hook( self._lease_context, self._report_status, self.stop, + self._request_lease_release, ) # Clear lease scope for next lease diff --git a/python/packages/jumpstarter/jumpstarter/exporter/hooks.py b/python/packages/jumpstarter/jumpstarter/exporter/hooks.py index b95f8c7b..0138ea54 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/hooks.py +++ 
b/python/packages/jumpstarter/jumpstarter/exporter/hooks.py @@ -2,13 +2,11 @@ import logging import os -import subprocess from collections.abc import Awaitable from dataclasses import dataclass from typing import TYPE_CHECKING, Callable, Literal import anyio -from anyio import open_process from jumpstarter.common import ExporterStatus, LogSource from jumpstarter.config.env import JMP_DRIVERS_ALLOW, JUMPSTARTER_HOST @@ -93,7 +91,7 @@ async def _execute_hook( logger.debug("Hook command is empty, skipping") return - logger.info("Executing hook: %s", command.strip().split("\n")[0][:100]) + logger.debug("Executing hook: %s", command.strip().split("\n")[0][:100]) # Determine hook type from log source hook_type = "before_lease" if log_source == LogSource.BEFORE_LEASE_HOOK else "after_lease" @@ -152,9 +150,11 @@ async def _execute_hook_process( ) -> None: """Execute the hook process with the given environment and logging session. - Uses anyio for subprocess execution to be compatible with the anyio-based exporter. + Uses subprocess with a PTY to force line buffering in the subprocess, + ensuring logs stream in real-time rather than being block-buffered. 
""" - + import pty + import subprocess command = hook_config.script timeout = hook_config.timeout @@ -167,65 +167,141 @@ async def _execute_hook_process( # Route hook output logs to the client via the session's log stream with logging_session.context_log_source(__name__, log_source): + # Create a PTY pair - this forces line buffering in the subprocess + logger.info("Starting hook subprocess...") + master_fd, slave_fd = pty.openpty() + + # Track which fds are still open (use list for mutability in nested scope) + fds_open = {"master": True, "slave": True} + + process: subprocess.Popen | None = None try: - # Execute the hook command using shell via anyio - # Pass the command as a string to use shell mode - async with await open_process( - command, + # Use subprocess.Popen with the PTY slave as stdin/stdout/stderr + # This avoids the issues with os.fork() in async contexts + process = subprocess.Popen( + ["/bin/sh", "-c", command], + stdin=slave_fd, + stdout=slave_fd, + stderr=slave_fd, env=hook_env, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - ) as process: - output_lines: list[str] = [] - # Create hook label for log output (e.g., "beforeLease" or "afterLease") - hook_label = "beforeLease" if hook_type == "before_lease" else "afterLease" - - async def read_output() -> None: - """Read stdout line by line.""" - assert process.stdout is not None - buffer = b"" - async for chunk in process.stdout: - buffer += chunk + start_new_session=True, # Equivalent to os.setsid() + ) + # Close slave in parent - subprocess has it now + os.close(slave_fd) + fds_open["slave"] = False + + output_lines: list[str] = [] + + # Set master fd to non-blocking mode + import fcntl + flags = fcntl.fcntl(master_fd, fcntl.F_GETFL) + fcntl.fcntl(master_fd, fcntl.F_SETFL, flags | os.O_NONBLOCK) + + async def read_pty_output() -> None: + """Read from PTY master fd line by line using non-blocking I/O.""" + buffer = b"" + while fds_open["master"]: + try: + # Wait for fd to be readable with 
timeout + with anyio.move_on_after(0.1): + await anyio.wait_readable(master_fd) + + # Read available data (non-blocking) + try: + chunk = os.read(master_fd, 4096) + if not chunk: + # EOF + break + buffer += chunk + except BlockingIOError: + # No data available right now, continue loop + continue + except OSError: + # PTY closed or error + break + + # Process complete lines while b"\n" in buffer: line, buffer = buffer.split(b"\n", 1) - line_decoded = line.decode().rstrip() - output_lines.append(line_decoded) - logger.info("[%s hook] %s", hook_label, line_decoded) - # Handle any remaining data without newline - if buffer: - line_decoded = buffer.decode().rstrip() - if line_decoded: - output_lines.append(line_decoded) - logger.info("[%s hook] %s", hook_label, line_decoded) - - # Use move_on_after for timeout - with anyio.move_on_after(timeout) as cancel_scope: - await read_output() - await process.wait() - - if cancel_scope.cancelled_caught: - timed_out = True - error_msg = f"Hook timed out after {timeout} seconds" - logger.error(error_msg) - # Terminate the process + line_decoded = line.decode(errors="replace").rstrip() + if line_decoded: + output_lines.append(line_decoded) + logger.info("%s", line_decoded) + + except OSError: + # PTY closed or read error + break + + # Handle any remaining data without newline + if buffer: + line_decoded = buffer.decode(errors="replace").rstrip() + if line_decoded: + output_lines.append(line_decoded) + logger.info("%s", line_decoded) + + async def wait_for_process() -> int: + """Wait for the subprocess to complete.""" + return await anyio.to_thread.run_sync( + process.wait, + abandon_on_cancel=True + ) + + # Use move_on_after for timeout + returncode: int | None = None + with anyio.move_on_after(timeout) as cancel_scope: + # Run output reading and process waiting concurrently + async with anyio.create_task_group() as tg: + tg.start_soon(read_pty_output) + returncode = await wait_for_process() + # Give a brief moment for any final 
output to be read + await anyio.sleep(0.2) + # Signal the read task to stop by marking fd as closed + # The read task checks fds_open["master"] in its loop + fds_open["master"] = False + # The task group will wait for read_pty_output to complete + # after it sees fds_open["master"] is False on next iteration + + if cancel_scope.cancelled_caught: + timed_out = True + error_msg = f"Hook timed out after {timeout} seconds" + logger.error(error_msg) + # Terminate the process + if process and process.poll() is None: process.terminate() # Give it a moment to terminate gracefully - with anyio.move_on_after(5): - await process.wait() + try: + with anyio.move_on_after(5): + await anyio.to_thread.run_sync(process.wait) + except Exception: + pass # Force kill if still running - if process.returncode is None: + if process.poll() is None: process.kill() + try: + process.wait() + except Exception: + pass - elif process.returncode == 0: - logger.info("Hook executed successfully") - return - else: - error_msg = f"Hook failed with exit code {process.returncode}" + elif returncode == 0: + logger.info("Hook executed successfully") + return + else: + error_msg = f"Hook failed with exit code {returncode}" except Exception as e: error_msg = f"Error executing hook: {e}" cause = e logger.error(error_msg, exc_info=True) + finally: + # Clean up the file descriptors (always close, fds_open tracks logical state) + try: + os.close(master_fd) + except OSError: + pass + try: + os.close(slave_fd) + except OSError: + pass # Handle failure if one occurred if error_msg is not None: @@ -366,6 +442,7 @@ async def run_after_lease_hook( lease_scope: "LeaseContext", report_status: Callable[["ExporterStatus", str], Awaitable[None]], shutdown: Callable[..., None], + request_lease_release: Callable[[], Awaitable[None]] | None = None, ) -> None: """Execute after-lease hook with full orchestration. 
@@ -375,11 +452,13 @@ async def run_after_lease_hook( - Sets up the hook executor with the session for logging - Executes the hook and handles errors - Triggers shutdown on critical failures (HookExecutionError) + - Requests lease release from controller after hook completes Args: lease_scope: LeaseScope containing session, socket_path, and client info report_status: Async callback to report status changes to controller shutdown: Callback to trigger exporter shutdown (accepts optional exit_code kwarg) + request_lease_release: Async callback to request lease release from controller """ try: # Verify lease scope is ready - for after-lease this should always be true @@ -436,3 +515,9 @@ async def run_after_lease_hook( f"afterLease hook failed: {e}", ) # Unexpected errors don't trigger shutdown - exporter remains available + + finally: + # Request lease release from controller after hook completes (success or failure) + # This ensures the lease is always released even if the client disconnects + if request_lease_release: + await request_lease_release() diff --git a/python/packages/jumpstarter/jumpstarter/exporter/lease_context.py b/python/packages/jumpstarter/jumpstarter/exporter/lease_context.py index d91dfda3..a2ecf511 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/lease_context.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/lease_context.py @@ -29,6 +29,7 @@ class LeaseContext: socket_path: Unix socket path where the session is serving (set in handle_lease) before_lease_hook: Event that signals when before-lease hook completes end_session_requested: Event that signals when client requests end session (to run afterLease hook) + after_lease_hook_started: Event that signals when afterLease hook has started (prevents double execution) after_lease_hook_done: Event that signals when afterLease hook has completed client_name: Name of the client currently holding the lease (empty if unleased) current_status: Current exporter status (stored here for 
access before session is created) @@ -38,6 +39,7 @@ class LeaseContext: lease_name: str before_lease_hook: Event end_session_requested: Event = field(default_factory=Event) + after_lease_hook_started: Event = field(default_factory=Event) after_lease_hook_done: Event = field(default_factory=Event) session: "Session | None" = None socket_path: str = "" @@ -86,3 +88,19 @@ def update_status(self, status: ExporterStatus, message: str = ""): # Also update session if it exists if self.session: self.session.update_status(status, message) + + def drivers_ready(self) -> bool: + """Check if drivers are ready for use (beforeLease hook completed). + + Returns True if the beforeLease hook has completed and drivers can be accessed. + Used by Session to gate driver calls during hook execution. + """ + return self.before_lease_hook.is_set() + + async def wait_for_drivers(self) -> None: + """Wait for drivers to be ready (beforeLease hook to complete). + + This method blocks until the beforeLease hook completes, allowing + clients to connect early but wait for driver access. 
+ """ + await self.before_lease_hook.wait() diff --git a/python/packages/jumpstarter/jumpstarter/exporter/logging.py b/python/packages/jumpstarter/jumpstarter/exporter/logging.py index 6a6e8dad..2f1ed12a 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/logging.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/logging.py @@ -46,7 +46,7 @@ def prepare(self, record): return jumpstarter_pb2.LogStreamResponse( uuid="", severity=record.levelname, - message=self.format(record), + message=record.getMessage(), # Use getMessage() directly to avoid formatter source=source.value, # Convert to proto value ) diff --git a/python/packages/jumpstarter/jumpstarter/exporter/session.py b/python/packages/jumpstarter/jumpstarter/exporter/session.py index b10359ed..02fab875 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/session.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/session.py @@ -149,7 +149,8 @@ async def LogStream(self, request, context): try: yield self._logging_queue.popleft() except IndexError: - await sleep(0.5) + # Short polling interval for real-time log streaming + await sleep(0.05) def update_status(self, status: int | ExporterStatus, message: str = ""): """Update the current exporter status for the session.""" @@ -173,7 +174,7 @@ def context_log_source(self, logger_name: str, source: LogSource): async def GetStatus(self, request, context): """Get the current exporter status.""" - logger.debug("GetStatus() -> %s", self._current_status) + logger.info("GetStatus() -> %s", self._current_status) return jumpstarter_pb2.GetStatusResponse( status=self._current_status.to_proto(), message=self._status_message, From 1cff35416263796924a220d14f4324d0614ede73 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Wed, 21 Jan 2026 12:38:29 -0500 Subject: [PATCH 30/30] Use separate channel for hooks and clients to prevent timeout issues --- .../jumpstarter-cli/jumpstarter_cli/shell.py | 37 ++-- .../jumpstarter/v1/jumpstarter_pb2.py | 22 ++- 
.../jumpstarter/v1/jumpstarter_pb2.pyi | 30 +++ .../jumpstarter/v1/jumpstarter_pb2_grpc.py | 46 +++++ .../jumpstarter/v1/jumpstarter_pb2_grpc.pyi | 47 ++++- .../jumpstarter/jumpstarter/client/core.py | 124 +++++++++++- .../jumpstarter/exporter/exporter.py | 177 ++++++++++++------ .../jumpstarter/jumpstarter/exporter/hooks.py | 46 ++++- .../jumpstarter/exporter/lease_context.py | 4 + .../jumpstarter/exporter/session.py | 106 +++++++++-- 10 files changed, 538 insertions(+), 101 deletions(-) diff --git a/python/packages/jumpstarter-cli/jumpstarter_cli/shell.py b/python/packages/jumpstarter-cli/jumpstarter_cli/shell.py index fd1ef677..8d824c5e 100644 --- a/python/packages/jumpstarter-cli/jumpstarter_cli/shell.py +++ b/python/packages/jumpstarter-cli/jumpstarter_cli/shell.py @@ -116,8 +116,9 @@ async def _run_shell_with_lease_async(lease, exporter_logs, config, command, can async with client.log_stream_async(show_all_logs=exporter_logs): # Wait for beforeLease hook to complete while logs are streaming # This allows hook output to be displayed in real-time + # Use streaming for real-time status updates instead of polling logger.debug("Waiting for beforeLease hook to complete...") - await client.wait_for_lease_ready() + await client.wait_for_lease_ready_streaming() logger.debug("wait_for_lease_ready returned, launching shell...") # Run the shell command @@ -127,23 +128,31 @@ async def _run_shell_with_lease_async(lease, exporter_logs, config, command, can # Shell has exited. Call EndSession to trigger afterLease hook # while keeping log stream open to receive hook logs. - # EndSession waits for the hook to complete on the server side, - # so we don't need additional client-side polling. + # EndSession returns immediately, so we need to wait for + # the hook to complete by monitoring the status. 
if lease.name and not cancel_scope.cancel_called: logger.info("Running afterLease hook (Ctrl+C to skip)...") try: - # EndSession triggers the afterLease hook and waits for completion - # Use a timeout to prevent hanging if the connection is disrupted - with anyio.move_on_after(300) as timeout_scope: # 5 minute timeout - success = await client.end_session_async() - if success: - logger.info("Lease released") - else: - logger.debug("EndSession not implemented, skipping hook") - if timeout_scope.cancelled_caught: - logger.warning("Timeout waiting for afterLease hook to complete") + # EndSession triggers the afterLease hook asynchronously + success = await client.end_session_async() + if success: + # Wait for hook to complete while log stream stays open + # This allows afterLease logs to be displayed in real-time + from jumpstarter.common import ExporterStatus + with anyio.move_on_after(300) as timeout_scope: # 5 minute timeout + completed = await client.wait_for_hook_status_streaming( + ExporterStatus.AVAILABLE, timeout=300.0 + ) + if completed: + logger.info("afterLease hook completed") + else: + logger.debug("Hook completion not confirmed") + if timeout_scope.cancelled_caught: + logger.warning("Timeout waiting for afterLease hook to complete") + else: + logger.debug("EndSession not implemented, skipping hook wait") except Exception as e: - logger.warning("Error during EndSession: %s", e) + logger.warning("Error during afterLease hook: %s", e) return exit_code diff --git a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.py b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.py index 02b070cb..752b385b 100644 --- a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.py +++ b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.py @@ -30,7 +30,7 @@ from . 
import common_pb2 as jumpstarter_dot_v1_dot_common__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n jumpstarter/v1/jumpstarter.proto\x12\x0ejumpstarter.v1\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1fjumpstarter/v1/kubernetes.proto\x1a\x1bjumpstarter/v1/common.proto\"\xd1\x01\n\x0fRegisterRequest\x12\x43\n\x06labels\x18\x01 \x03(\x0b\x32+.jumpstarter.v1.RegisterRequest.LabelsEntryR\x06labels\x12>\n\x07reports\x18\x02 \x03(\x0b\x32$.jumpstarter.v1.DriverInstanceReportR\x07reports\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xd2\x03\n\x14\x44riverInstanceReport\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12$\n\x0bparent_uuid\x18\x02 \x01(\tH\x00R\nparentUuid\x88\x01\x01\x12H\n\x06labels\x18\x03 \x03(\x0b\x32\x30.jumpstarter.v1.DriverInstanceReport.LabelsEntryR\x06labels\x12%\n\x0b\x64\x65scription\x18\x04 \x01(\tH\x01R\x0b\x64\x65scription\x88\x01\x01\x12m\n\x13methods_description\x18\x05 \x03(\x0b\x32<.jumpstarter.v1.DriverInstanceReport.MethodsDescriptionEntryR\x12methodsDescription\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1a\x45\n\x17MethodsDescriptionEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\x0e\n\x0c_parent_uuidB\x0e\n\x0c_description\"&\n\x10RegisterResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\"+\n\x11UnregisterRequest\x12\x16\n\x06reason\x18\x02 \x01(\tR\x06reason\"\x14\n\x12UnregisterResponse\".\n\rListenRequest\x12\x1d\n\nlease_name\x18\x01 \x01(\tR\tleaseName\"\\\n\x0eListenResponse\x12\'\n\x0frouter_endpoint\x18\x01 \x01(\tR\x0erouterEndpoint\x12!\n\x0crouter_token\x18\x02 \x01(\tR\x0brouterToken\"\x0f\n\rStatusRequest\"\x91\x01\n\x0eStatusResponse\x12\x16\n\x06leased\x18\x01 
\x01(\x08R\x06leased\x12\"\n\nlease_name\x18\x02 \x01(\tH\x00R\tleaseName\x88\x01\x01\x12$\n\x0b\x63lient_name\x18\x03 \x01(\tH\x01R\nclientName\x88\x01\x01\x42\r\n\x0b_lease_nameB\x0e\n\x0c_client_name\",\n\x0b\x44ialRequest\x12\x1d\n\nlease_name\x18\x01 \x01(\tR\tleaseName\"Z\n\x0c\x44ialResponse\x12\'\n\x0frouter_endpoint\x18\x01 \x01(\tR\x0erouterEndpoint\x12!\n\x0crouter_token\x18\x02 \x01(\tR\x0brouterToken\"\xa1\x01\n\x12\x41uditStreamRequest\x12#\n\rexporter_uuid\x18\x01 \x01(\tR\x0c\x65xporterUuid\x12\x30\n\x14\x64river_instance_uuid\x18\x02 \x01(\tR\x12\x64riverInstanceUuid\x12\x1a\n\x08severity\x18\x03 \x01(\tR\x08severity\x12\x18\n\x07message\x18\x04 \x01(\tR\x07message\"\xb4\x01\n\x13ReportStatusRequest\x12\x36\n\x06status\x18\x01 \x01(\x0e\x32\x1e.jumpstarter.v1.ExporterStatusR\x06status\x12\x1d\n\x07message\x18\x02 \x01(\tH\x00R\x07message\x88\x01\x01\x12(\n\rrelease_lease\x18\x03 \x01(\x08H\x01R\x0creleaseLease\x88\x01\x01\x42\n\n\x08_messageB\x10\n\x0e_release_lease\"\x16\n\x14ReportStatusResponse\"\xb8\x02\n\x11GetReportResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x45\n\x06labels\x18\x02 \x03(\x0b\x32-.jumpstarter.v1.GetReportResponse.LabelsEntryR\x06labels\x12>\n\x07reports\x18\x03 \x03(\x0b\x32$.jumpstarter.v1.DriverInstanceReportR\x07reports\x12M\n\x15\x61lternative_endpoints\x18\x04 \x03(\x0b\x32\x18.jumpstarter.v1.EndpointR\x14\x61lternativeEndpoints\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xa5\x01\n\x08\x45ndpoint\x12\x1a\n\x08\x65ndpoint\x18\x01 \x01(\tR\x08\x65ndpoint\x12 \n\x0b\x63\x65rtificate\x18\x02 \x01(\tR\x0b\x63\x65rtificate\x12-\n\x12\x63lient_certificate\x18\x03 \x01(\tR\x11\x63lientCertificate\x12,\n\x12\x63lient_private_key\x18\x04 \x01(\tR\x10\x63lientPrivateKey\"k\n\x11\x44riverCallRequest\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x16\n\x06method\x18\x02 \x01(\tR\x06method\x12*\n\x04\x61rgs\x18\x03 
\x03(\x0b\x32\x16.google.protobuf.ValueR\x04\x61rgs\"X\n\x12\x44riverCallResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12.\n\x06result\x18\x02 \x01(\x0b\x32\x16.google.protobuf.ValueR\x06result\"t\n\x1aStreamingDriverCallRequest\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x16\n\x06method\x18\x02 \x01(\tR\x06method\x12*\n\x04\x61rgs\x18\x03 \x03(\x0b\x32\x16.google.protobuf.ValueR\x04\x61rgs\"a\n\x1bStreamingDriverCallResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12.\n\x06result\x18\x02 \x01(\x0b\x32\x16.google.protobuf.ValueR\x06result\"\xa0\x01\n\x11LogStreamResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x1a\n\x08severity\x18\x02 \x01(\tR\x08severity\x12\x18\n\x07message\x18\x03 \x01(\tR\x07message\x12\x36\n\x06source\x18\x04 \x01(\x0e\x32\x19.jumpstarter.v1.LogSourceH\x00R\x06source\x88\x01\x01\x42\t\n\x07_source\"\x0e\n\x0cResetRequest\"\x0f\n\rResetResponse\"%\n\x0fGetLeaseRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"\x93\x03\n\x10GetLeaseResponse\x12\x35\n\x08\x64uration\x18\x01 \x01(\x0b\x32\x19.google.protobuf.DurationR\x08\x64uration\x12\x39\n\x08selector\x18\x02 \x01(\x0b\x32\x1d.jumpstarter.v1.LabelSelectorR\x08selector\x12>\n\nbegin_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00R\tbeginTime\x88\x01\x01\x12:\n\x08\x65nd_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x01R\x07\x65ndTime\x88\x01\x01\x12(\n\rexporter_uuid\x18\x05 \x01(\tH\x02R\x0c\x65xporterUuid\x88\x01\x01\x12\x39\n\nconditions\x18\x06 \x03(\x0b\x32\x19.jumpstarter.v1.ConditionR\nconditionsB\r\n\x0b_begin_timeB\x0b\n\t_end_timeB\x10\n\x0e_exporter_uuid\"\x87\x01\n\x13RequestLeaseRequest\x12\x35\n\x08\x64uration\x18\x01 \x01(\x0b\x32\x19.google.protobuf.DurationR\x08\x64uration\x12\x39\n\x08selector\x18\x02 \x01(\x0b\x32\x1d.jumpstarter.v1.LabelSelectorR\x08selector\"*\n\x14RequestLeaseResponse\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\")\n\x13ReleaseLeaseRequest\x12\x12\n\x04name\x18\x01 
\x01(\tR\x04name\"\x16\n\x14ReleaseLeaseResponse\"\x13\n\x11ListLeasesRequest\"*\n\x12ListLeasesResponse\x12\x14\n\x05names\x18\x01 \x03(\tR\x05names\"\x12\n\x10GetStatusRequest\"v\n\x11GetStatusResponse\x12\x36\n\x06status\x18\x01 \x01(\x0e\x32\x1e.jumpstarter.v1.ExporterStatusR\x06status\x12\x1d\n\x07message\x18\x02 \x01(\tH\x00R\x07message\x88\x01\x01\x42\n\n\x08_message\"\x13\n\x11\x45ndSessionRequest\"Y\n\x12\x45ndSessionResponse\x12\x18\n\x07success\x18\x01 \x01(\x08R\x07success\x12\x1d\n\x07message\x18\x02 \x01(\tH\x00R\x07message\x88\x01\x01\x42\n\n\x08_message2\x92\x07\n\x11\x43ontrollerService\x12M\n\x08Register\x12\x1f.jumpstarter.v1.RegisterRequest\x1a .jumpstarter.v1.RegisterResponse\x12S\n\nUnregister\x12!.jumpstarter.v1.UnregisterRequest\x1a\".jumpstarter.v1.UnregisterResponse\x12Y\n\x0cReportStatus\x12#.jumpstarter.v1.ReportStatusRequest\x1a$.jumpstarter.v1.ReportStatusResponse\x12I\n\x06Listen\x12\x1d.jumpstarter.v1.ListenRequest\x1a\x1e.jumpstarter.v1.ListenResponse0\x01\x12I\n\x06Status\x12\x1d.jumpstarter.v1.StatusRequest\x1a\x1e.jumpstarter.v1.StatusResponse0\x01\x12\x41\n\x04\x44ial\x12\x1b.jumpstarter.v1.DialRequest\x1a\x1c.jumpstarter.v1.DialResponse\x12K\n\x0b\x41uditStream\x12\".jumpstarter.v1.AuditStreamRequest\x1a\x16.google.protobuf.Empty(\x01\x12M\n\x08GetLease\x12\x1f.jumpstarter.v1.GetLeaseRequest\x1a 
.jumpstarter.v1.GetLeaseResponse\x12Y\n\x0cRequestLease\x12#.jumpstarter.v1.RequestLeaseRequest\x1a$.jumpstarter.v1.RequestLeaseResponse\x12Y\n\x0cReleaseLease\x12#.jumpstarter.v1.ReleaseLeaseRequest\x1a$.jumpstarter.v1.ReleaseLeaseResponse\x12S\n\nListLeases\x12!.jumpstarter.v1.ListLeasesRequest\x1a\".jumpstarter.v1.ListLeasesResponse2\xd7\x04\n\x0f\x45xporterService\x12\x46\n\tGetReport\x12\x16.google.protobuf.Empty\x1a!.jumpstarter.v1.GetReportResponse\x12S\n\nDriverCall\x12!.jumpstarter.v1.DriverCallRequest\x1a\".jumpstarter.v1.DriverCallResponse\x12p\n\x13StreamingDriverCall\x12*.jumpstarter.v1.StreamingDriverCallRequest\x1a+.jumpstarter.v1.StreamingDriverCallResponse0\x01\x12H\n\tLogStream\x12\x16.google.protobuf.Empty\x1a!.jumpstarter.v1.LogStreamResponse0\x01\x12\x44\n\x05Reset\x12\x1c.jumpstarter.v1.ResetRequest\x1a\x1d.jumpstarter.v1.ResetResponse\x12P\n\tGetStatus\x12 .jumpstarter.v1.GetStatusRequest\x1a!.jumpstarter.v1.GetStatusResponse\x12S\n\nEndSession\x12!.jumpstarter.v1.EndSessionRequest\x1a\".jumpstarter.v1.EndSessionResponseB\x7f\n\x12\x63om.jumpstarter.v1B\x10JumpstarterProtoP\x01\xa2\x02\x03JXX\xaa\x02\x0eJumpstarter.V1\xca\x02\x0eJumpstarter\\V1\xe2\x02\x1aJumpstarter\\V1\\GPBMetadata\xea\x02\x0fJumpstarter::V1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n jumpstarter/v1/jumpstarter.proto\x12\x0ejumpstarter.v1\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1fjumpstarter/v1/kubernetes.proto\x1a\x1bjumpstarter/v1/common.proto\"\xd1\x01\n\x0fRegisterRequest\x12\x43\n\x06labels\x18\x01 \x03(\x0b\x32+.jumpstarter.v1.RegisterRequest.LabelsEntryR\x06labels\x12>\n\x07reports\x18\x02 \x03(\x0b\x32$.jumpstarter.v1.DriverInstanceReportR\x07reports\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 
\x01(\tR\x05value:\x02\x38\x01\"\xd2\x03\n\x14\x44riverInstanceReport\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12$\n\x0bparent_uuid\x18\x02 \x01(\tH\x00R\nparentUuid\x88\x01\x01\x12H\n\x06labels\x18\x03 \x03(\x0b\x32\x30.jumpstarter.v1.DriverInstanceReport.LabelsEntryR\x06labels\x12%\n\x0b\x64\x65scription\x18\x04 \x01(\tH\x01R\x0b\x64\x65scription\x88\x01\x01\x12m\n\x13methods_description\x18\x05 \x03(\x0b\x32<.jumpstarter.v1.DriverInstanceReport.MethodsDescriptionEntryR\x12methodsDescription\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1a\x45\n\x17MethodsDescriptionEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\x0e\n\x0c_parent_uuidB\x0e\n\x0c_description\"&\n\x10RegisterResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\"+\n\x11UnregisterRequest\x12\x16\n\x06reason\x18\x02 \x01(\tR\x06reason\"\x14\n\x12UnregisterResponse\".\n\rListenRequest\x12\x1d\n\nlease_name\x18\x01 \x01(\tR\tleaseName\"\\\n\x0eListenResponse\x12\'\n\x0frouter_endpoint\x18\x01 \x01(\tR\x0erouterEndpoint\x12!\n\x0crouter_token\x18\x02 \x01(\tR\x0brouterToken\"\x0f\n\rStatusRequest\"\x91\x01\n\x0eStatusResponse\x12\x16\n\x06leased\x18\x01 \x01(\x08R\x06leased\x12\"\n\nlease_name\x18\x02 \x01(\tH\x00R\tleaseName\x88\x01\x01\x12$\n\x0b\x63lient_name\x18\x03 \x01(\tH\x01R\nclientName\x88\x01\x01\x42\r\n\x0b_lease_nameB\x0e\n\x0c_client_name\",\n\x0b\x44ialRequest\x12\x1d\n\nlease_name\x18\x01 \x01(\tR\tleaseName\"Z\n\x0c\x44ialResponse\x12\'\n\x0frouter_endpoint\x18\x01 \x01(\tR\x0erouterEndpoint\x12!\n\x0crouter_token\x18\x02 \x01(\tR\x0brouterToken\"\xa1\x01\n\x12\x41uditStreamRequest\x12#\n\rexporter_uuid\x18\x01 \x01(\tR\x0c\x65xporterUuid\x12\x30\n\x14\x64river_instance_uuid\x18\x02 \x01(\tR\x12\x64riverInstanceUuid\x12\x1a\n\x08severity\x18\x03 \x01(\tR\x08severity\x12\x18\n\x07message\x18\x04 
\x01(\tR\x07message\"\xb4\x01\n\x13ReportStatusRequest\x12\x36\n\x06status\x18\x01 \x01(\x0e\x32\x1e.jumpstarter.v1.ExporterStatusR\x06status\x12\x1d\n\x07message\x18\x02 \x01(\tH\x00R\x07message\x88\x01\x01\x12(\n\rrelease_lease\x18\x03 \x01(\x08H\x01R\x0creleaseLease\x88\x01\x01\x42\n\n\x08_messageB\x10\n\x0e_release_lease\"\x16\n\x14ReportStatusResponse\"\xb8\x02\n\x11GetReportResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x45\n\x06labels\x18\x02 \x03(\x0b\x32-.jumpstarter.v1.GetReportResponse.LabelsEntryR\x06labels\x12>\n\x07reports\x18\x03 \x03(\x0b\x32$.jumpstarter.v1.DriverInstanceReportR\x07reports\x12M\n\x15\x61lternative_endpoints\x18\x04 \x03(\x0b\x32\x18.jumpstarter.v1.EndpointR\x14\x61lternativeEndpoints\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xa5\x01\n\x08\x45ndpoint\x12\x1a\n\x08\x65ndpoint\x18\x01 \x01(\tR\x08\x65ndpoint\x12 \n\x0b\x63\x65rtificate\x18\x02 \x01(\tR\x0b\x63\x65rtificate\x12-\n\x12\x63lient_certificate\x18\x03 \x01(\tR\x11\x63lientCertificate\x12,\n\x12\x63lient_private_key\x18\x04 \x01(\tR\x10\x63lientPrivateKey\"k\n\x11\x44riverCallRequest\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x16\n\x06method\x18\x02 \x01(\tR\x06method\x12*\n\x04\x61rgs\x18\x03 \x03(\x0b\x32\x16.google.protobuf.ValueR\x04\x61rgs\"X\n\x12\x44riverCallResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12.\n\x06result\x18\x02 \x01(\x0b\x32\x16.google.protobuf.ValueR\x06result\"t\n\x1aStreamingDriverCallRequest\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x16\n\x06method\x18\x02 \x01(\tR\x06method\x12*\n\x04\x61rgs\x18\x03 \x03(\x0b\x32\x16.google.protobuf.ValueR\x04\x61rgs\"a\n\x1bStreamingDriverCallResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12.\n\x06result\x18\x02 \x01(\x0b\x32\x16.google.protobuf.ValueR\x06result\"\xa0\x01\n\x11LogStreamResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x1a\n\x08severity\x18\x02 
\x01(\tR\x08severity\x12\x18\n\x07message\x18\x03 \x01(\tR\x07message\x12\x36\n\x06source\x18\x04 \x01(\x0e\x32\x19.jumpstarter.v1.LogSourceH\x00R\x06source\x88\x01\x01\x42\t\n\x07_source\"\x0e\n\x0cResetRequest\"\x0f\n\rResetResponse\"%\n\x0fGetLeaseRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"\x93\x03\n\x10GetLeaseResponse\x12\x35\n\x08\x64uration\x18\x01 \x01(\x0b\x32\x19.google.protobuf.DurationR\x08\x64uration\x12\x39\n\x08selector\x18\x02 \x01(\x0b\x32\x1d.jumpstarter.v1.LabelSelectorR\x08selector\x12>\n\nbegin_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00R\tbeginTime\x88\x01\x01\x12:\n\x08\x65nd_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x01R\x07\x65ndTime\x88\x01\x01\x12(\n\rexporter_uuid\x18\x05 \x01(\tH\x02R\x0c\x65xporterUuid\x88\x01\x01\x12\x39\n\nconditions\x18\x06 \x03(\x0b\x32\x19.jumpstarter.v1.ConditionR\nconditionsB\r\n\x0b_begin_timeB\x0b\n\t_end_timeB\x10\n\x0e_exporter_uuid\"\x87\x01\n\x13RequestLeaseRequest\x12\x35\n\x08\x64uration\x18\x01 \x01(\x0b\x32\x19.google.protobuf.DurationR\x08\x64uration\x12\x39\n\x08selector\x18\x02 \x01(\x0b\x32\x1d.jumpstarter.v1.LabelSelectorR\x08selector\"*\n\x14RequestLeaseResponse\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\")\n\x13ReleaseLeaseRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"\x16\n\x14ReleaseLeaseResponse\"\x13\n\x11ListLeasesRequest\"*\n\x12ListLeasesResponse\x12\x14\n\x05names\x18\x01 \x03(\tR\x05names\"\x12\n\x10GetStatusRequest\"v\n\x11GetStatusResponse\x12\x36\n\x06status\x18\x01 \x01(\x0e\x32\x1e.jumpstarter.v1.ExporterStatusR\x06status\x12\x1d\n\x07message\x18\x02 \x01(\tH\x00R\x07message\x88\x01\x01\x42\n\n\x08_message\"\x15\n\x13StreamStatusRequest\"y\n\x14StreamStatusResponse\x12\x36\n\x06status\x18\x01 \x01(\x0e\x32\x1e.jumpstarter.v1.ExporterStatusR\x06status\x12\x1d\n\x07message\x18\x02 \x01(\tH\x00R\x07message\x88\x01\x01\x42\n\n\x08_message\"\x13\n\x11\x45ndSessionRequest\"Y\n\x12\x45ndSessionResponse\x12\x18\n\x07success\x18\x01 
\x01(\x08R\x07success\x12\x1d\n\x07message\x18\x02 \x01(\tH\x00R\x07message\x88\x01\x01\x42\n\n\x08_message2\x92\x07\n\x11\x43ontrollerService\x12M\n\x08Register\x12\x1f.jumpstarter.v1.RegisterRequest\x1a .jumpstarter.v1.RegisterResponse\x12S\n\nUnregister\x12!.jumpstarter.v1.UnregisterRequest\x1a\".jumpstarter.v1.UnregisterResponse\x12Y\n\x0cReportStatus\x12#.jumpstarter.v1.ReportStatusRequest\x1a$.jumpstarter.v1.ReportStatusResponse\x12I\n\x06Listen\x12\x1d.jumpstarter.v1.ListenRequest\x1a\x1e.jumpstarter.v1.ListenResponse0\x01\x12I\n\x06Status\x12\x1d.jumpstarter.v1.StatusRequest\x1a\x1e.jumpstarter.v1.StatusResponse0\x01\x12\x41\n\x04\x44ial\x12\x1b.jumpstarter.v1.DialRequest\x1a\x1c.jumpstarter.v1.DialResponse\x12K\n\x0b\x41uditStream\x12\".jumpstarter.v1.AuditStreamRequest\x1a\x16.google.protobuf.Empty(\x01\x12M\n\x08GetLease\x12\x1f.jumpstarter.v1.GetLeaseRequest\x1a .jumpstarter.v1.GetLeaseResponse\x12Y\n\x0cRequestLease\x12#.jumpstarter.v1.RequestLeaseRequest\x1a$.jumpstarter.v1.RequestLeaseResponse\x12Y\n\x0cReleaseLease\x12#.jumpstarter.v1.ReleaseLeaseRequest\x1a$.jumpstarter.v1.ReleaseLeaseResponse\x12S\n\nListLeases\x12!.jumpstarter.v1.ListLeasesRequest\x1a\".jumpstarter.v1.ListLeasesResponse2\xb4\x05\n\x0f\x45xporterService\x12\x46\n\tGetReport\x12\x16.google.protobuf.Empty\x1a!.jumpstarter.v1.GetReportResponse\x12S\n\nDriverCall\x12!.jumpstarter.v1.DriverCallRequest\x1a\".jumpstarter.v1.DriverCallResponse\x12p\n\x13StreamingDriverCall\x12*.jumpstarter.v1.StreamingDriverCallRequest\x1a+.jumpstarter.v1.StreamingDriverCallResponse0\x01\x12H\n\tLogStream\x12\x16.google.protobuf.Empty\x1a!.jumpstarter.v1.LogStreamResponse0\x01\x12\x44\n\x05Reset\x12\x1c.jumpstarter.v1.ResetRequest\x1a\x1d.jumpstarter.v1.ResetResponse\x12P\n\tGetStatus\x12 
.jumpstarter.v1.GetStatusRequest\x1a!.jumpstarter.v1.GetStatusResponse\x12[\n\x0cStreamStatus\x12#.jumpstarter.v1.StreamStatusRequest\x1a$.jumpstarter.v1.StreamStatusResponse0\x01\x12S\n\nEndSession\x12!.jumpstarter.v1.EndSessionRequest\x1a\".jumpstarter.v1.EndSessionResponseB\x7f\n\x12\x63om.jumpstarter.v1B\x10JumpstarterProtoP\x01\xa2\x02\x03JXX\xaa\x02\x0eJumpstarter.V1\xca\x02\x0eJumpstarter\\V1\xe2\x02\x1aJumpstarter\\V1\\GPBMetadata\xea\x02\x0fJumpstarter::V1b\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -120,12 +120,16 @@ _globals['_GETSTATUSREQUEST']._serialized_end=3714 _globals['_GETSTATUSRESPONSE']._serialized_start=3716 _globals['_GETSTATUSRESPONSE']._serialized_end=3834 - _globals['_ENDSESSIONREQUEST']._serialized_start=3836 - _globals['_ENDSESSIONREQUEST']._serialized_end=3855 - _globals['_ENDSESSIONRESPONSE']._serialized_start=3857 - _globals['_ENDSESSIONRESPONSE']._serialized_end=3946 - _globals['_CONTROLLERSERVICE']._serialized_start=3949 - _globals['_CONTROLLERSERVICE']._serialized_end=4863 - _globals['_EXPORTERSERVICE']._serialized_start=4866 - _globals['_EXPORTERSERVICE']._serialized_end=5465 + _globals['_STREAMSTATUSREQUEST']._serialized_start=3836 + _globals['_STREAMSTATUSREQUEST']._serialized_end=3857 + _globals['_STREAMSTATUSRESPONSE']._serialized_start=3859 + _globals['_STREAMSTATUSRESPONSE']._serialized_end=3980 + _globals['_ENDSESSIONREQUEST']._serialized_start=3982 + _globals['_ENDSESSIONREQUEST']._serialized_end=4001 + _globals['_ENDSESSIONRESPONSE']._serialized_start=4003 + _globals['_ENDSESSIONRESPONSE']._serialized_end=4092 + _globals['_CONTROLLERSERVICE']._serialized_start=4095 + _globals['_CONTROLLERSERVICE']._serialized_end=5009 + _globals['_EXPORTERSERVICE']._serialized_start=5012 + _globals['_EXPORTERSERVICE']._serialized_end=5704 # @@protoc_insertion_point(module_scope) diff --git a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.pyi 
b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.pyi index 9db0b676..30ad9aac 100644 --- a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.pyi +++ b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.pyi @@ -723,6 +723,36 @@ class GetStatusResponse(google.protobuf.message.Message): Global___GetStatusResponse: typing_extensions.TypeAlias = GetStatusResponse +@typing.final +class StreamStatusRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +Global___StreamStatusRequest: typing_extensions.TypeAlias = StreamStatusRequest + +@typing.final +class StreamStatusResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + STATUS_FIELD_NUMBER: builtins.int + MESSAGE_FIELD_NUMBER: builtins.int + status: jumpstarter.v1.common_pb2.ExporterStatus.ValueType + message: builtins.str + def __init__( + self, + *, + status: jumpstarter.v1.common_pb2.ExporterStatus.ValueType = ..., + message: builtins.str | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_message", b"_message", "message", b"message"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_message", b"_message", "message", b"message", "status", b"status"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["_message", b"_message"]) -> typing.Literal["message"] | None: ... 
+ +Global___StreamStatusResponse: typing_extensions.TypeAlias = StreamStatusResponse + @typing.final class EndSessionRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor diff --git a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.py b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.py index 8360da0c..85bf1906 100644 --- a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.py +++ b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.py @@ -572,6 +572,11 @@ def __init__(self, channel): request_serializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.GetStatusRequest.SerializeToString, response_deserializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.GetStatusResponse.FromString, _registered_method=True) + self.StreamStatus = channel.unary_stream( + '/jumpstarter.v1.ExporterService/StreamStatus', + request_serializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.StreamStatusRequest.SerializeToString, + response_deserializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.StreamStatusResponse.FromString, + _registered_method=True) self.EndSession = channel.unary_unary( '/jumpstarter.v1.ExporterService/EndSession', request_serializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.EndSessionRequest.SerializeToString, @@ -621,6 +626,15 @@ def GetStatus(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def StreamStatus(self, request, context): + """Stream status updates to the client + Returns immediately with current status, then streams updates as they occur + Use this instead of polling GetStatus for real-time status updates + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def EndSession(self, 
request, context): """End the current session, triggering the afterLease hook The client should keep the connection open to receive hook logs via LogStream @@ -663,6 +677,11 @@ def add_ExporterServiceServicer_to_server(servicer, server): request_deserializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.GetStatusRequest.FromString, response_serializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.GetStatusResponse.SerializeToString, ), + 'StreamStatus': grpc.unary_stream_rpc_method_handler( + servicer.StreamStatus, + request_deserializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.StreamStatusRequest.FromString, + response_serializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.StreamStatusResponse.SerializeToString, + ), 'EndSession': grpc.unary_unary_rpc_method_handler( servicer.EndSession, request_deserializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.EndSessionRequest.FromString, @@ -843,6 +862,33 @@ def GetStatus(request, metadata, _registered_method=True) + @staticmethod + def StreamStatus(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_stream( + request, + target, + '/jumpstarter.v1.ExporterService/StreamStatus', + jumpstarter_dot_v1_dot_jumpstarter__pb2.StreamStatusRequest.SerializeToString, + jumpstarter_dot_v1_dot_jumpstarter__pb2.StreamStatusResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + @staticmethod def EndSession(request, target, diff --git a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.pyi b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.pyi index 8352c5f4..ce4a917d 100644 --- a/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.pyi +++ 
b/python/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.pyi @@ -600,6 +600,22 @@ _ExporterServiceGetStatusType = typing_extensions.TypeVar( ], ) +_ExporterServiceStreamStatusType = typing_extensions.TypeVar( + '_ExporterServiceStreamStatusType', + grpc.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StreamStatusRequest, + jumpstarter.v1.jumpstarter_pb2.StreamStatusResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StreamStatusRequest, + jumpstarter.v1.jumpstarter_pb2.StreamStatusResponse, + ], + default=grpc.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StreamStatusRequest, + jumpstarter.v1.jumpstarter_pb2.StreamStatusResponse, + ], +) + _ExporterServiceEndSessionType = typing_extensions.TypeVar( '_ExporterServiceEndSessionType', grpc.UnaryUnaryMultiCallable[ @@ -616,7 +632,7 @@ _ExporterServiceEndSessionType = typing_extensions.TypeVar( ], ) -class ExporterServiceStub(typing.Generic[_ExporterServiceGetReportType, _ExporterServiceDriverCallType, _ExporterServiceStreamingDriverCallType, _ExporterServiceLogStreamType, _ExporterServiceResetType, _ExporterServiceGetStatusType, _ExporterServiceEndSessionType]): +class ExporterServiceStub(typing.Generic[_ExporterServiceGetReportType, _ExporterServiceDriverCallType, _ExporterServiceStreamingDriverCallType, _ExporterServiceLogStreamType, _ExporterServiceResetType, _ExporterServiceGetStatusType, _ExporterServiceStreamStatusType, _ExporterServiceEndSessionType]): """A service a exporter can share locally to be used without a server Channel/Call credentials are used to authenticate the client, and routing to the right exporter """ @@ -647,6 +663,10 @@ class ExporterServiceStub(typing.Generic[_ExporterServiceGetReportType, _Exporte jumpstarter.v1.jumpstarter_pb2.GetStatusRequest, jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, ], + grpc.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StreamStatusRequest, + 
jumpstarter.v1.jumpstarter_pb2.StreamStatusResponse, + ], grpc.UnaryUnaryMultiCallable[ jumpstarter.v1.jumpstarter_pb2.EndSessionRequest, jumpstarter.v1.jumpstarter_pb2.EndSessionResponse, @@ -679,6 +699,10 @@ class ExporterServiceStub(typing.Generic[_ExporterServiceGetReportType, _Exporte jumpstarter.v1.jumpstarter_pb2.GetStatusRequest, jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, ], + grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StreamStatusRequest, + jumpstarter.v1.jumpstarter_pb2.StreamStatusResponse, + ], grpc.aio.UnaryUnaryMultiCallable[ jumpstarter.v1.jumpstarter_pb2.EndSessionRequest, jumpstarter.v1.jumpstarter_pb2.EndSessionResponse, @@ -698,6 +722,12 @@ class ExporterServiceStub(typing.Generic[_ExporterServiceGetReportType, _Exporte GetStatus: _ExporterServiceGetStatusType + StreamStatus: _ExporterServiceStreamStatusType + """Stream status updates to the client + Returns immediately with current status, then streams updates as they occur + Use this instead of polling GetStatus for real-time status updates + """ + EndSession: _ExporterServiceEndSessionType """End the current session, triggering the afterLease hook The client should keep the connection open to receive hook logs via LogStream @@ -729,6 +759,10 @@ ExporterServiceAsyncStub: typing_extensions.TypeAlias = ExporterServiceStub[ jumpstarter.v1.jumpstarter_pb2.GetStatusRequest, jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, ], + grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StreamStatusRequest, + jumpstarter.v1.jumpstarter_pb2.StreamStatusResponse, + ], grpc.aio.UnaryUnaryMultiCallable[ jumpstarter.v1.jumpstarter_pb2.EndSessionRequest, jumpstarter.v1.jumpstarter_pb2.EndSessionResponse, @@ -783,6 +817,17 @@ class ExporterServiceServicer(metaclass=abc.ABCMeta): context: _ServicerContext, ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.GetStatusResponse]]: ... 
+ @abc.abstractmethod + def StreamStatus( + self, + request: jumpstarter.v1.jumpstarter_pb2.StreamStatusRequest, + context: _ServicerContext, + ) -> typing.Union[collections.abc.Iterator[jumpstarter.v1.jumpstarter_pb2.StreamStatusResponse], collections.abc.AsyncIterator[jumpstarter.v1.jumpstarter_pb2.StreamStatusResponse]]: + """Stream status updates to the client + Returns immediately with current status, then streams updates as they occur + Use this instead of polling GetStatus for real-time status updates + """ + @abc.abstractmethod def EndSession( self, diff --git a/python/packages/jumpstarter/jumpstarter/client/core.py b/python/packages/jumpstarter/jumpstarter/client/core.py index b360ba9f..aa3c4475 100644 --- a/python/packages/jumpstarter/jumpstarter/client/core.py +++ b/python/packages/jumpstarter/jumpstarter/client/core.py @@ -122,6 +122,57 @@ async def check_exporter_status(self): if status not in ALLOWED_STATUSES: raise ExporterNotReady(f"Exporter status is {status}") + async def wait_for_lease_ready_streaming(self, timeout: float = 300.0) -> None: + """Wait for exporter to report LEASE_READY status using streaming. + + Uses StreamStatus RPC for real-time status updates instead of polling. + This is more efficient and provides immediate notification of status changes. 
+ + Args: + timeout: Maximum time to wait in seconds (default: 5 minutes) + """ + import anyio + + self.logger.debug("Waiting for exporter to be ready (streaming)...") + seen_before_lease_hook = False + + try: + with anyio.move_on_after(timeout): + async for response in self.stub.StreamStatus(jumpstarter_pb2.StreamStatusRequest()): + status = ExporterStatus.from_proto(response.status) + self.logger.debug("StreamStatus received: %s", status) + + if status == ExporterStatus.LEASE_READY: + self.logger.info("Exporter ready, starting shell...") + return + elif status == ExporterStatus.BEFORE_LEASE_HOOK_FAILED: + self.logger.warning("beforeLease hook failed") + return + elif status == ExporterStatus.AFTER_LEASE_HOOK: + # Lease ended before becoming ready + raise DriverError("Lease ended before becoming ready") + elif status == ExporterStatus.BEFORE_LEASE_HOOK: + seen_before_lease_hook = True + self.logger.debug("beforeLease hook is running...") + elif status == ExporterStatus.AVAILABLE: + if seen_before_lease_hook: + # Lease ended - AVAILABLE after BEFORE_LEASE_HOOK indicates lease released + raise DriverError("Lease ended before becoming ready") + else: + # Initial AVAILABLE state - waiting for lease assignment + self.logger.debug("Exporter status: AVAILABLE (waiting for lease assignment)") + else: + self.logger.debug("Exporter status: %s (waiting...)", status) + + self.logger.warning("Timeout waiting for beforeLease hook to complete") + except AioRpcError as e: + if e.code() == StatusCode.UNIMPLEMENTED: + # StreamStatus not implemented, fall back to polling + self.logger.debug("StreamStatus not implemented, falling back to polling") + await self.wait_for_lease_ready(timeout) + else: + raise DriverError(f"Error streaming status: {e.details()}") from e + async def wait_for_lease_ready(self, timeout: float = 300.0) -> None: """Wait for exporter to report LEASE_READY status. 
@@ -129,6 +180,8 @@ async def wait_for_lease_ready(self, timeout: float = 300.0) -> None: Should be called after log streaming is started so hook output can be displayed in real-time. + Prefer wait_for_lease_ready_streaming() for real-time status updates. + Args: timeout: Maximum time to wait in seconds (default: 5 minutes) """ @@ -136,22 +189,25 @@ async def wait_for_lease_ready(self, timeout: float = 300.0) -> None: poll_interval = 0.5 # seconds elapsed = 0.0 + poll_count = 0 - self.logger.info("Waiting for exporter to be ready...") + self.logger.debug("Waiting for exporter to be ready...") while elapsed < timeout: + poll_count += 1 + self.logger.debug("[POLL %d] Calling GetStatus (elapsed: %.1fs)...", poll_count, elapsed) try: status = await self.get_status_async() - self.logger.debug("GetStatus returned: %s", status) + self.logger.debug("[POLL %d] GetStatus returned: %s", poll_count, status) except Exception as e: # Connection error - keep trying - self.logger.debug("Error getting status, will retry: %s", e) + self.logger.debug("[POLL %d] Error getting status, will retry: %s", poll_count, e) await anyio.sleep(poll_interval) elapsed += poll_interval continue if status is None: # GetStatus not implemented - assume ready for backward compatibility - self.logger.debug("GetStatus not implemented, assuming ready") + self.logger.debug("[POLL %d] GetStatus not implemented, assuming ready", poll_count) return if status == ExporterStatus.LEASE_READY: @@ -159,7 +215,7 @@ async def wait_for_lease_ready(self, timeout: float = 300.0) -> None: return elif status == ExporterStatus.BEFORE_LEASE_HOOK: # Hook is running - this is expected, keep waiting - self.logger.debug("beforeLease hook is running...") + self.logger.debug("[POLL %d] beforeLease hook is running...", poll_count) elif status == ExporterStatus.BEFORE_LEASE_HOOK_FAILED: # Hook failed - log but continue (exporter may still be usable) self.logger.warning("beforeLease hook failed") @@ -167,15 +223,16 @@ async def 
wait_for_lease_ready(self, timeout: float = 300.0) -> None: elif status == ExporterStatus.AVAILABLE: # Exporter is available but not yet leased - keep waiting # This can happen if client connects before exporter receives lease assignment - self.logger.info("Exporter status: AVAILABLE (waiting for lease assignment)") + self.logger.debug("[POLL %d] Exporter status: AVAILABLE (waiting for lease assignment)", poll_count) else: # Other status - continue waiting - self.logger.info("Exporter status: %s (waiting...)", status) + self.logger.debug("[POLL %d] Exporter status: %s (waiting...)", poll_count, status) + self.logger.debug("[POLL %d] Sleeping for %.1fs before next poll...", poll_count, poll_interval) await anyio.sleep(poll_interval) elapsed += poll_interval - self.logger.warning("Timeout waiting for beforeLease hook to complete") + self.logger.warning("Timeout waiting for beforeLease hook to complete (after %d polls)", poll_count) async def end_session_async(self) -> bool: """End the current session and trigger the afterLease hook. @@ -209,12 +266,61 @@ async def end_session_async(self) -> bool: return True raise DriverError(f"Failed to end session: {e.details()}") from e + async def wait_for_hook_status_streaming(self, target_status: "ExporterStatus", timeout: float = 60.0) -> bool: + """Wait for exporter to reach a target status using streaming. + + Uses StreamStatus RPC for real-time status updates instead of polling. + Used after end_session_async() to wait for afterLease hook completion + while keeping the log stream open to receive hook logs. 
+ + Args: + target_status: The status to wait for (typically AVAILABLE) + timeout: Maximum time to wait in seconds (default: 60 seconds) + + Returns: + True if target status was reached, False if timed out or connection error + """ + import anyio + + self.logger.debug("Waiting for hook completion via StreamStatus (target: %s)", target_status) + + try: + with anyio.move_on_after(timeout): + async for response in self.stub.StreamStatus(jumpstarter_pb2.StreamStatusRequest()): + status = ExporterStatus.from_proto(response.status) + self.logger.debug("StreamStatus received: %s", status) + + if status == target_status: + self.logger.debug("Exporter reached target status: %s", status) + return True + + # Hook failed states also indicate completion + if status == ExporterStatus.AFTER_LEASE_HOOK_FAILED: + self.logger.warning("afterLease hook failed") + return True + + # Still running hook - keep waiting + self.logger.debug("Waiting for hook completion, current status: %s", status) + + self.logger.warning("Timeout waiting for hook to complete (target: %s)", target_status) + return False + except AioRpcError as e: + if e.code() == StatusCode.UNIMPLEMENTED: + # StreamStatus not implemented, fall back to polling + self.logger.debug("StreamStatus not implemented, falling back to polling") + return await self.wait_for_hook_status(target_status, timeout) + # Connection error - the hook may still be running but we can't confirm + self.logger.debug("Connection error while waiting for hook: %s", e.code()) + return False + async def wait_for_hook_status(self, target_status: "ExporterStatus", timeout: float = 60.0) -> bool: - """Wait for exporter to reach a target status. + """Wait for exporter to reach a target status using polling. Used after end_session_async() to wait for afterLease hook completion while keeping the log stream open to receive hook logs. + Prefer wait_for_hook_status_streaming() for real-time status updates. 
+ Args: target_status: The status to wait for (typically AVAILABLE) timeout: Maximum time to wait in seconds (default: 60 seconds) diff --git a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py index 61f2e969..6313ad87 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/exporter.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/exporter.py @@ -224,7 +224,9 @@ async def _retry_stream( while True: try: controller = await self._get_controller_stub() + logger.debug("%s stream connected to controller", stream_name) async for item in stream_factory(controller): + logger.debug("%s stream received item", stream_name) await send_tx.send(item) except Exception as e: if retries_left > 0: @@ -397,11 +399,13 @@ async def _handle_client_conn( or an error occurs. """ try: + logger.debug("Connecting to session socket at %s", path) async with await connect_unix(path) as stream: + logger.debug("Connected to session, bridging to router at %s", endpoint) async with connect_router_stream(endpoint, token, stream, tls_config, grpc_options): - pass + logger.debug("Router stream established, forwarding traffic") except Exception as e: - logger.info("failed to handle connection: {}".format(e)) + logger.warning("Failed to handle client connection: %s", e) async def _handle_end_session(self, lease_context: LeaseContext) -> None: """Handle EndSession requests from client. @@ -446,7 +450,7 @@ async def _handle_end_session(self, lease_context: LeaseContext) -> None: @asynccontextmanager async def session(self): - """Create and manage an exporter Session context. + """Create and manage an exporter Session context for initial registration. Yields: tuple[Session, str]: A tuple of (session, socket_path) for use in lease handling. 
@@ -467,6 +471,36 @@ async def session(self): # Yield both session and path for creating LeaseScope yield session, path + @asynccontextmanager + async def session_for_lease(self): + """Create and manage an exporter Session context with separate hook socket. + + This creates two Unix sockets: + - Main socket: For client gRPC connections (LogStream, driver calls, etc.) + - Hook socket: For hook subprocess j commands (isolated to prevent SSL corruption) + + The separation prevents SSL frame corruption that occurs when multiple gRPC + connections share the same socket simultaneously. + + Yields: + tuple[Session, str, str]: A tuple of (session, main_socket_path, hook_socket_path) + """ + with Session( + uuid=self.uuid, + labels=self.labels, + root_device=self.device_factory(), + ) as session: + # Create dual Unix sockets - one for clients, one for hooks + async with session.serve_unix_with_hook_socket_async() as (main_path, hook_path): + # Create a gRPC channel to the controller via the main socket + async with grpc.aio.secure_channel( + f"unix://{main_path}", grpc.local_channel_credentials(grpc.LocalConnectionType.UDS) + ) as channel: + # Register the exporter with the controller + await self._register_with_controller(channel) + # Yield session and both socket paths + yield session, main_path, hook_path + async def handle_lease(self, lease_name: str, tg: TaskGroup, lease_scope: LeaseContext) -> None: """Handle all incoming client connections for a lease. @@ -493,7 +527,10 @@ async def handle_lease(self, lease_name: str, tg: TaskGroup, lease_scope: LeaseC """ logger.info("Listening for incoming connection requests on lease %s", lease_name) - listen_tx, listen_rx = create_memory_object_stream[jumpstarter_pb2.ListenResponse]() + # Buffer Listen responses to avoid blocking when responses arrive before + # process_connections starts iterating. This prevents a race condition where + # the client dials immediately after lease acquisition but before the session is ready. 
+ listen_tx, listen_rx = create_memory_object_stream[jumpstarter_pb2.ListenResponse](max_buffer_size=10) # Start listening for connection requests with retry logic tg.start_soon( @@ -504,43 +541,86 @@ async def handle_lease(self, lease_name: str, tg: TaskGroup, lease_scope: LeaseC ) # Create session for the lease duration and populate lease_scope - async with self.session() as (session, path): - # Populate the lease scope with session and socket path + # Uses dual sockets: main socket for clients, hook socket for j commands + async with self.session_for_lease() as (session, main_path, hook_path): + # Populate the lease scope with session and socket paths lease_scope.session = session - lease_scope.socket_path = path + lease_scope.socket_path = main_path + lease_scope.hook_socket_path = hook_path # Isolated socket for hook j commands # Link session to lease context for EndSession RPC session.lease_context = lease_scope + # Sync status from LeaseContext to Session (status may have been updated + # before session was created, e.g., LEASE_READY when no hooks configured) + session.update_status(lease_scope.current_status, lease_scope.status_message) + logger.debug("Session sockets: main=%s, hook=%s", main_path, hook_path) # Accept connections immediately - driver calls will be gated internally # until the beforeLease hook completes. This allows LogStream to work # during hook execution for real-time log streaming. logger.info("Accepting connections (driver calls gated until beforeLease hook completes)") - # Sync status to session - this is updated by the hook runner - session.update_status(lease_scope.current_status, lease_scope.status_message) + # Note: Status is managed by _report_status() which updates both LeaseContext + # and Session. The sync above handles the case where status was updated before + # session creation (e.g., LEASE_READY when no hooks configured). 
# Start task to handle EndSession requests (runs afterLease hook when client signals done) tg.start_soon(self._handle_end_session, lease_scope) - # Process client connections immediately + # Process client connections until lease ends + # The lease can end via: + # 1. listen_rx stream closing (controller stops sending) + # 2. lease_ended event being set (serve() detected lease status change) # Type: request is jumpstarter_pb2.ListenResponse with router_endpoint and router_token fields try: - async for request in listen_rx: - logger.info("Handling new connection request on lease %s", lease_name) - tg.start_soon( - self._handle_client_conn, - lease_scope.socket_path, - request.router_endpoint, - request.router_token, - self.tls, - self.grpc_options, - ) + async with create_task_group() as conn_tg: + async def wait_for_lease_end(): + """Wait for lease_ended event and cancel the connection loop.""" + await lease_scope.lease_ended.wait() + logger.info("Lease ended event received, stopping connection handling") + conn_tg.cancel_scope.cancel() + + async def process_connections(): + """Process incoming connection requests.""" + logger.debug("Starting to process connection requests from Listen stream") + async for request in listen_rx: + logger.info( + "Handling new connection request on lease %s (router=%s)", + lease_name, + request.router_endpoint, + ) + tg.start_soon( + self._handle_client_conn, + lease_scope.socket_path, + request.router_endpoint, + request.router_token, + self.tls, + self.grpc_options, + ) + + conn_tg.start_soon(wait_for_lease_end) + conn_tg.start_soon(process_connections) finally: - # Wait for afterLease hook to complete before closing the session + # Run afterLease hook before closing the session # This ensures the socket is still available for driver calls within the hook - # Shield this wait from cancellation so the hook can complete even during shutdown + # Shield from cancellation so the hook can complete even during shutdown with 
CancelScope(shield=True): - if lease_scope.end_session_requested.is_set(): + # Always run afterLease hook when handle_lease exits (session closing) + # Skip if already started via EndSession or lease state transition + if not lease_scope.after_lease_hook_started.is_set(): + lease_scope.after_lease_hook_started.set() + if self.hook_executor and lease_scope.has_client(): + logger.info("Running afterLease hook on session close") + await self.hook_executor.run_after_lease_hook( + lease_scope, + self._report_status, + self.stop, + self._request_lease_release, + ) + # Mark hook as done if we didn't run it (no hook configured or no client) + if not lease_scope.after_lease_hook_done.is_set(): + lease_scope.after_lease_hook_done.set() + else: + # Hook was already started elsewhere, wait for it to complete logger.debug("Waiting for afterLease hook to complete before closing session") await lease_scope.after_lease_hook_done.wait() logger.debug("afterLease hook completed, closing session") @@ -552,7 +632,8 @@ async def serve(self): # noqa: C901 # initial registration async with self.session(): pass - status_tx, status_rx = create_memory_object_stream[jumpstarter_pb2.StatusResponse]() + # Buffer status updates to avoid blocking during short processing gaps + status_tx, status_rx = create_memory_object_stream[jumpstarter_pb2.StatusResponse](max_buffer_size=5) async with create_task_group() as tg: self._tg = tg @@ -571,25 +652,18 @@ async def serve(self): # noqa: C901 and self._lease_context.lease_name != status.lease_name ) if lease_changed: - # After-lease hook for the previous lease (lease name changed) - # Skip if already started via EndSession - if ( - self.hook_executor - and self._lease_context.has_client() - and not self._lease_context.after_lease_hook_started.is_set() - ): - # Mark hook as started to prevent duplicate execution - self._lease_context.after_lease_hook_started.set() - - with CancelScope(shield=True): - await self.hook_executor.run_after_lease_hook( - 
self._lease_context, - self._report_status, - self.stop, - self._request_lease_release, - ) + # Lease name changed - need to restart with new lease + # Signal handle_lease() that the lease has ended so it can exit its loop + # and run the afterLease hook in its finally block (where session is still open) + logger.info("Lease status changed, signaling lease ended") + self._lease_context.lease_ended.set() + + # Wait for the afterLease hook to complete (run by handle_lease finally block) + # This ensures the session stays open for the hook subprocess + with CancelScope(shield=True): + await self._lease_context.after_lease_hook_done.wait() + logger.info("afterLease hook completed, stopping exporter") - logger.info("Lease status changed, killing existing connections") # Clear lease scope for next lease self._lease_context = None self.stop() @@ -634,25 +708,20 @@ async def serve(self): # noqa: C901 logger.info("Currently not leased") # After-lease hook when transitioning from leased to unleased - # Skip if already started via EndSession + # Signal handle_lease() that the lease has ended so it can exit its loop + # and run the afterLease hook in its finally block (where session is still open) if ( previous_leased - and self.hook_executor and self._lease_context and self._lease_context.has_client() - and not self._lease_context.after_lease_hook_started.is_set() ): - # Mark hook as started to prevent duplicate execution - self._lease_context.after_lease_hook_started.set() + logger.info("Lease ended, signaling handle_lease to run afterLease hook") + self._lease_context.lease_ended.set() - # Shield the after-lease hook from cancellation + # Wait for the hook to complete with CancelScope(shield=True): - await self.hook_executor.run_after_lease_hook( - self._lease_context, - self._report_status, - self.stop, - self._request_lease_release, - ) + await self._lease_context.after_lease_hook_done.wait() + logger.info("afterLease hook completed") # Clear lease scope for next lease 
self._lease_context = None diff --git a/python/packages/jumpstarter/jumpstarter/exporter/hooks.py b/python/packages/jumpstarter/jumpstarter/exporter/hooks.py index 0138ea54..c76ef0d8 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/hooks.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/hooks.py @@ -57,15 +57,23 @@ def _create_hook_env(self, lease_scope: "LeaseContext") -> dict[str, str]: """Create standardized hook environment variables. Args: - lease_scope: LeaseScope containing lease metadata and socket path + lease_scope: LeaseScope containing lease metadata and socket paths Returns: Dictionary of environment variables for hook execution + + Note: + Uses the hook_socket_path (if available) instead of the main socket_path + to prevent SSL frame corruption when hook j commands access the session + concurrently with client LogStream connections. """ hook_env = os.environ.copy() + # Use dedicated hook socket to prevent SSL corruption + # Falls back to main socket if hook socket not available (backward compatibility) + socket_path = lease_scope.hook_socket_path or lease_scope.socket_path hook_env.update( { - JUMPSTARTER_HOST: str(lease_scope.socket_path), + JUMPSTARTER_HOST: str(socket_path), JMP_DRIVERS_ALLOW: "UNSAFE", # Allow all drivers for local access "LEASE_NAME": lease_scope.lease_name, "CLIENT_NAME": lease_scope.client_name, @@ -98,6 +106,10 @@ async def _execute_hook( # Use existing session from lease_scope hook_env = self._create_hook_env(lease_scope) + logger.debug("Hook environment: JUMPSTARTER_HOST=%s, LEASE_NAME=%s, CLIENT_NAME=%s", + hook_env.get("JUMPSTARTER_HOST", "NOT_SET"), + hook_env.get("LEASE_NAME", "NOT_SET"), + hook_env.get("CLIENT_NAME", "NOT_SET")) return await self._execute_hook_process( hook_config, lease_scope, log_source, hook_env, lease_scope.session, hook_type @@ -170,6 +182,7 @@ async def _execute_hook_process( # Create a PTY pair - this forces line buffering in the subprocess logger.info("Starting hook 
subprocess...") master_fd, slave_fd = pty.openpty() + logger.debug("PTY created: master_fd=%d, slave_fd=%d", master_fd, slave_fd) # Track which fds are still open (use list for mutability in nested scope) fds_open = {"master": True, "slave": True} @@ -178,6 +191,7 @@ async def _execute_hook_process( try: # Use subprocess.Popen with the PTY slave as stdin/stdout/stderr # This avoids the issues with os.fork() in async contexts + logger.debug("Spawning subprocess with command: %s", command[:100]) process = subprocess.Popen( ["/bin/sh", "-c", command], stdin=slave_fd, @@ -186,9 +200,11 @@ async def _execute_hook_process( env=hook_env, start_new_session=True, # Equivalent to os.setsid() ) + logger.debug("Subprocess spawned with PID %d", process.pid) # Close slave in parent - subprocess has it now os.close(slave_fd) fds_open["slave"] = False + logger.debug("Closed slave_fd in parent") output_lines: list[str] = [] @@ -196,15 +212,27 @@ async def _execute_hook_process( import fcntl flags = fcntl.fcntl(master_fd, fcntl.F_GETFL) fcntl.fcntl(master_fd, fcntl.F_SETFL, flags | os.O_NONBLOCK) + logger.debug("Master fd set to non-blocking") async def read_pty_output() -> None: """Read from PTY master fd line by line using non-blocking I/O.""" + logger.debug("read_pty_output task started") buffer = b"" + read_count = 0 + last_heartbeat = 0 + import time + start_time = time.monotonic() while fds_open["master"]: try: # Wait for fd to be readable with timeout with anyio.move_on_after(0.1): await anyio.wait_readable(master_fd) + read_count += 1 + # Log heartbeat every 2 seconds + elapsed = time.monotonic() - start_time + if elapsed - last_heartbeat >= 2.0: + logger.debug("read_pty_output: heartbeat at %.1fs, iterations=%d", elapsed, read_count) + last_heartbeat = elapsed # Read available data (non-blocking) try: @@ -241,18 +269,30 @@ async def read_pty_output() -> None: async def wait_for_process() -> int: """Wait for the subprocess to complete.""" - return await 
anyio.to_thread.run_sync( + logger.debug("wait_for_process: waiting for PID %d", process.pid) + result = await anyio.to_thread.run_sync( process.wait, abandon_on_cancel=True ) + logger.debug("wait_for_process: PID %d exited with code %d", process.pid, result) + return result # Use move_on_after for timeout returncode: int | None = None + logger.debug("Starting task group for PTY reading and process waiting (timeout=%d)", timeout) + + # Yield to event loop to ensure other tasks can progress + # This helps prevent race conditions in task scheduling + await anyio.sleep(0) + with anyio.move_on_after(timeout) as cancel_scope: # Run output reading and process waiting concurrently async with anyio.create_task_group() as tg: + logger.debug("Task group created, starting read_pty_output task") tg.start_soon(read_pty_output) + logger.debug("Calling wait_for_process...") returncode = await wait_for_process() + logger.debug("wait_for_process returned: %s", returncode) # Give a brief moment for any final output to be read await anyio.sleep(0.2) # Signal the read task to stop by marking fd as closed diff --git a/python/packages/jumpstarter/jumpstarter/exporter/lease_context.py b/python/packages/jumpstarter/jumpstarter/exporter/lease_context.py index a2ecf511..c67476db 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/lease_context.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/lease_context.py @@ -27,10 +27,12 @@ class LeaseContext: lease_name: Name of the current lease assigned by the controller session: The Session object managing the device and gRPC services (set in handle_lease) socket_path: Unix socket path where the session is serving (set in handle_lease) + hook_socket_path: Separate Unix socket for hook j commands to avoid SSL frame corruption before_lease_hook: Event that signals when before-lease hook completes end_session_requested: Event that signals when client requests end session (to run afterLease hook) after_lease_hook_started: Event that 
signals when afterLease hook has started (prevents double execution) after_lease_hook_done: Event that signals when afterLease hook has completed + lease_ended: Event that signals when the lease has ended (from controller status update) client_name: Name of the client currently holding the lease (empty if unleased) current_status: Current exporter status (stored here for access before session is created) status_message: Message describing the current status @@ -41,8 +43,10 @@ class LeaseContext: end_session_requested: Event = field(default_factory=Event) after_lease_hook_started: Event = field(default_factory=Event) after_lease_hook_done: Event = field(default_factory=Event) + lease_ended: Event = field(default_factory=Event) # Signals lease has ended (from controller) session: "Session | None" = None socket_path: str = "" + hook_socket_path: str = "" # Separate socket for hook j commands to avoid SSL corruption client_name: str = field(default="") current_status: ExporterStatus = field(default=ExporterStatus.AVAILABLE) status_message: str = field(default="") diff --git a/python/packages/jumpstarter/jumpstarter/exporter/session.py b/python/packages/jumpstarter/jumpstarter/exporter/session.py index 02fab875..b00f9184 100644 --- a/python/packages/jumpstarter/jumpstarter/exporter/session.py +++ b/python/packages/jumpstarter/jumpstarter/exporter/session.py @@ -94,12 +94,54 @@ async def serve_port_async(self, port): finally: await server.stop(grace=None) + @asynccontextmanager + async def serve_multi_port_async(self, *ports): + """Serve session on multiple ports simultaneously. + + This is used to create separate sockets for client connections and hook + j commands, preventing SSL frame corruption when both are active. 
+ + Args: + *ports: One or more port specifications (e.g., "unix:///path/to/socket") + + Yields: + None - caller manages socket paths externally + """ + server = grpc.aio.server() + for port in ports: + server.add_insecure_port(port) + + jumpstarter_pb2_grpc.add_ExporterServiceServicer_to_server(self, server) + router_pb2_grpc.add_RouterServiceServicer_to_server(self, server) + + await server.start() + try: + yield + finally: + await server.stop(grace=None) + @asynccontextmanager async def serve_unix_async(self): with TemporarySocket() as path: async with self.serve_port_async(f"unix://{path}"): yield path + @asynccontextmanager + async def serve_unix_with_hook_socket_async(self): + """Serve session on two Unix sockets: one for clients, one for hooks. + + This creates separate sockets to prevent SSL frame corruption when + hook subprocess j commands access the session concurrently with + client LogStream connections. + + Yields: + tuple[str, str]: (main_socket_path, hook_socket_path) + """ + with TemporarySocket() as main_path: + with TemporarySocket() as hook_path: + async with self.serve_multi_port_async(f"unix://{main_path}", f"unix://{hook_path}"): + yield main_path, hook_path + @contextmanager def serve_unix(self): with start_blocking_portal() as portal: @@ -145,7 +187,7 @@ async def Stream(self, _request_iterator, context): await event.wait() async def LogStream(self, request, context): - while True: + while not context.done(): try: yield self._logging_queue.popleft() except IndexError: @@ -153,12 +195,16 @@ async def LogStream(self, request, context): await sleep(0.05) def update_status(self, status: int | ExporterStatus, message: str = ""): - """Update the current exporter status for the session.""" + """Update the current exporter status for the session and signal status change.""" if isinstance(status, int): self._current_status = ExporterStatus.from_proto(status) else: self._current_status = status self._status_message = message + # Signal status change 
for StreamStatus subscribers + self._status_update_event.set() + # Create a new event for the next status change + self._status_update_event = Event() def add_logger_source(self, logger_name: str, source: LogSource): """Add a log source mapping for a specific logger.""" @@ -180,13 +226,51 @@ async def GetStatus(self, request, context): message=self._status_message, ) + async def StreamStatus(self, request, context): + """Stream status updates to the client. + + Yields the current status immediately, then yields updates whenever + the status changes. This replaces polling GetStatus for real-time + status updates during hook execution. + + The stream continues until the client disconnects or the context is done. + """ + logger.info("StreamStatus() started") + + # Send current status immediately + yield jumpstarter_pb2.StreamStatusResponse( + status=self._current_status.to_proto(), + message=self._status_message, + ) + + # Stream updates as they occur + while not context.done(): + # Wait for status change event + current_event = self._status_update_event + await current_event.wait() + + # Send the updated status + logger.debug("StreamStatus() sending update: %s", self._current_status) + yield jumpstarter_pb2.StreamStatusResponse( + status=self._current_status.to_proto(), + message=self._status_message, + ) + async def EndSession(self, request, context): """End the current session and trigger the afterLease hook. - This is called by the client when it's done with the session but wants - to keep the connection open to receive logs from the afterLease hook. - The method signals the end_session_requested event and waits for the - afterLease hook to complete before returning. + This is called by the client when it's done with the session. The method + signals the end_session_requested event and returns immediately, allowing + the client to keep receiving logs via LogStream while the afterLease hook + runs asynchronously. + + The client should: + 1. 
Keep LogStream running after calling EndSession + 2. Use StreamStatus (or poll GetStatus) to detect when AVAILABLE status is reached + 3. Then disconnect + + This enables the session socket to stay open for controller monitoring and + supports exporter autonomy - the afterLease hook runs regardless of client state. Returns: EndSessionResponse with success status and optional message. @@ -201,15 +285,15 @@ async def EndSession(self, request, context): ) # Signal that the client wants to end the session + # The afterLease hook will run asynchronously via _handle_end_session logger.debug("Setting end_session_requested event") self.lease_context.end_session_requested.set() - # Wait for the afterLease hook to complete - logger.debug("Waiting for after_lease_hook_done event") - await self.lease_context.after_lease_hook_done.wait() - logger.info("EndSession complete, afterLease hook finished") + # Return immediately - don't wait for afterLease hook + # The client should continue receiving logs and monitor status for AVAILABLE + logger.info("EndSession signaled, afterLease hook will run asynchronously") return jumpstarter_pb2.EndSessionResponse( success=True, - message="Session ended and afterLease hook completed", + message="Session end triggered, afterLease hook running asynchronously", )