diff --git a/cmd/obol/llm.go b/cmd/obol/llm.go index 8f11ac5..7a5eada 100644 --- a/cmd/obol/llm.go +++ b/cmd/obol/llm.go @@ -4,6 +4,7 @@ import ( "bufio" "fmt" "os" + "sort" "strings" "github.com/ObolNetwork/obol-stack/internal/config" @@ -46,6 +47,39 @@ func llmCommand(cfg *config.Config) *cli.Command { return llm.ConfigureLLMSpy(cfg, provider, apiKey) }, }, + { + Name: "status", + Usage: "Show global llmspy provider status", + Action: func(c *cli.Context) error { + status, err := llm.GetProviderStatus(cfg) + if err != nil { + return err + } + + providers := make([]string, 0, len(status)) + for name := range status { + providers = append(providers, name) + } + sort.Strings(providers) + + fmt.Println("Global llmspy providers:") + fmt.Println() + fmt.Printf(" %-12s %-8s %-10s %s\n", "PROVIDER", "ENABLED", "API KEY", "ENV VAR") + for _, name := range providers { + s := status[name] + key := "n/a" + if s.APIKeyEnv != "" { + if s.HasAPIKey { + key = "set" + } else { + key = "missing" + } + } + fmt.Printf(" %-12s %-8t %-10s %s\n", name, s.Enabled, key, s.APIKeyEnv) + } + return nil + }, + }, }, } } diff --git a/cmd/obol/main.go b/cmd/obol/main.go index 203c662..1ba26e5 100644 --- a/cmd/obol/main.go +++ b/cmd/obol/main.go @@ -65,6 +65,7 @@ COMMANDS: LLM Gateway: llm configure Configure cloud AI provider in llmspy gateway + llm status Show global llmspy provider status Inference (x402 Pay-Per-Request): inference serve Start the x402 inference gateway diff --git a/internal/llm/llm.go b/internal/llm/llm.go index 2980069..97c2356 100644 --- a/internal/llm/llm.go +++ b/internal/llm/llm.go @@ -25,6 +25,13 @@ var providerEnvKeys = map[string]string{ "openai": "OPENAI_API_KEY", } +// ProviderStatus captures effective global llmspy provider state. +type ProviderStatus struct { + Enabled bool + HasAPIKey bool + APIKeyEnv string +} + // ConfigureLLMSpy enables a cloud provider in the llmspy gateway. 
// It patches the llms-secrets Secret with the API key, enables the provider // in the llmspy-config ConfigMap, and restarts the deployment. @@ -76,6 +83,73 @@ func ConfigureLLMSpy(cfg *config.Config, provider, apiKey string) error { return nil } +// GetProviderStatus reads llmspy ConfigMap + Secret and returns global provider status. +func GetProviderStatus(cfg *config.Config) (map[string]ProviderStatus, error) { + kubectlBinary := filepath.Join(cfg.BinDir, "kubectl") + kubeconfigPath := filepath.Join(cfg.ConfigDir, "kubeconfig.yaml") + if _, err := os.Stat(kubeconfigPath); os.IsNotExist(err) { + return nil, fmt.Errorf("cluster not running. Run 'obol stack up' first") + } + + llmsRaw, err := kubectlOutput(kubectlBinary, kubeconfigPath, + "get", "configmap", configMapName, "-n", namespace, "-o", "jsonpath={.data.llms\\.json}") + if err != nil { + return nil, err + } + var llmsConfig map[string]interface{} + if err := json.Unmarshal([]byte(llmsRaw), &llmsConfig); err != nil { + return nil, fmt.Errorf("failed to parse llms.json from ConfigMap: %w", err) + } + + status := make(map[string]ProviderStatus) + if providers, ok := llmsConfig["providers"].(map[string]interface{}); ok { + for name, raw := range providers { + enabled := false + if p, ok := raw.(map[string]interface{}); ok { + if v, ok := p["enabled"].(bool); ok { + enabled = v + } + } + keyEnv := providerEnvKeys[name] + status[name] = ProviderStatus{ + Enabled: enabled, + HasAPIKey: name == "ollama", + APIKeyEnv: keyEnv, + } + } + } + + secretRaw, err := kubectlOutput(kubectlBinary, kubeconfigPath, + "get", "secret", secretName, "-n", namespace, "-o", "json") + if err != nil { + return nil, err + } + var secret struct { + Data map[string]string `json:"data"` + } + if err := json.Unmarshal([]byte(secretRaw), &secret); err != nil { + return nil, fmt.Errorf("failed to parse llms secret: %w", err) + } + + for provider, envKey := range providerEnvKeys { + st := status[provider] + st.APIKeyEnv = envKey + if v, ok := 
secret.Data[envKey]; ok && strings.TrimSpace(v) != "" { + st.HasAPIKey = true + } + status[provider] = st + } + + if _, ok := status["ollama"]; !ok { + status["ollama"] = ProviderStatus{ + Enabled: true, + HasAPIKey: true, + } + } + + return status, nil +} + // enableProviderInConfigMap reads the llmspy-config ConfigMap, parses llms.json, // sets providers..enabled = true, and patches the ConfigMap back. func enableProviderInConfigMap(kubectlBinary, kubeconfigPath, provider string) error { @@ -150,3 +224,20 @@ func kubectl(binary, kubeconfig string, args ...string) error { } return nil } + +func kubectlOutput(binary, kubeconfig string, args ...string) (string, error) { + cmd := exec.Command(binary, args...) + cmd.Env = append(os.Environ(), fmt.Sprintf("KUBECONFIG=%s", kubeconfig)) + var stdout bytes.Buffer + cmd.Stdout = &stdout + var stderr bytes.Buffer + cmd.Stderr = &stderr + if err := cmd.Run(); err != nil { + errMsg := strings.TrimSpace(stderr.String()) + if errMsg != "" { + return "", fmt.Errorf("%w: %s", err, errMsg) + } + return "", err + } + return stdout.String(), nil +} diff --git a/internal/openclaw/import.go b/internal/openclaw/import.go index e48dd72..6c4c6cd 100644 --- a/internal/openclaw/import.go +++ b/internal/openclaw/import.go @@ -84,10 +84,10 @@ type openclawConfig struct { } type openclawProvider struct { - BaseURL string `json:"baseUrl"` - API string `json:"api"` - APIKey string `json:"apiKey"` - Models []openclawModel `json:"models"` + BaseURL string `json:"baseUrl"` + API string `json:"api"` + APIKey string `json:"apiKey"` + Models []openclawModel `json:"models"` } type openclawModel struct { @@ -135,15 +135,20 @@ func detectExistingConfigAt(home string) (*ImportResult, error) { fmt.Printf(" Note: unknown API type '%s' for provider '%s', will auto-detect\n", p.API, name) } ip := ImportedProvider{ - Name: name, - BaseURL: p.BaseURL, - API: sanitized, + Name: name, + BaseURL: p.BaseURL, + API: sanitized, + APIKeyEnvVar: 
defaultProviderAPIKeyEnvVar(name), } - // Only import literal API keys, skip env-var references like ${...} + // Import either a literal key (for secret extraction) or env-var reference. if p.APIKey != "" && !isEnvVarRef(p.APIKey) { ip.APIKey = p.APIKey } else if p.APIKey != "" { - fmt.Printf(" Note: provider '%s' uses an env-var reference for its API key (will need manual configuration)\n", name) + if envVar, ok := extractEnvVarName(p.APIKey); ok { + ip.APIKeyEnvVar = envVar + } else { + fmt.Printf(" Note: provider '%s' uses an env-var reference for its API key (will need manual configuration)\n", name) + } } for _, m := range p.Models { ip.Models = append(ip.Models, ImportedModel{ID: m.ID, Name: m.Name}) @@ -221,9 +226,6 @@ func TranslateToOverlayYAML(result *ImportResult) string { if p.APIKeyEnvVar != "" { b.WriteString(fmt.Sprintf(" apiKeyEnvVar: %s\n", p.APIKeyEnvVar)) } - if p.APIKey != "" { - b.WriteString(fmt.Sprintf(" apiKeyValue: %s\n", p.APIKey)) - } if len(p.Models) > 0 { b.WriteString(" models:\n") for _, m := range p.Models { @@ -244,20 +246,14 @@ func TranslateToOverlayYAML(result *ImportResult) string { if result.Channels.Telegram != nil { b.WriteString(" telegram:\n") b.WriteString(" enabled: true\n") - b.WriteString(fmt.Sprintf(" botToken: %s\n", result.Channels.Telegram.BotToken)) } if result.Channels.Discord != nil { b.WriteString(" discord:\n") b.WriteString(" enabled: true\n") - b.WriteString(fmt.Sprintf(" botToken: %s\n", result.Channels.Discord.BotToken)) } if result.Channels.Slack != nil { b.WriteString(" slack:\n") b.WriteString(" enabled: true\n") - b.WriteString(fmt.Sprintf(" botToken: %s\n", result.Channels.Slack.BotToken)) - if result.Channels.Slack.AppToken != "" { - b.WriteString(fmt.Sprintf(" appToken: %s\n", result.Channels.Slack.AppToken)) - } } b.WriteString("\n") } @@ -368,6 +364,46 @@ func sanitizeModelAPI(api string) string { return "" } +func defaultProviderAPIKeyEnvVar(provider string) string { + switch provider { + case 
"anthropic": + return "ANTHROPIC_API_KEY" + case "openai": + return "OPENAI_API_KEY" + case "ollama": + return "OLLAMA_API_KEY" + default: + var out []rune + for _, r := range strings.ToUpper(provider) { + if (r >= 'A' && r <= 'Z') || (r >= '0' && r <= '9') { + out = append(out, r) + } else { + out = append(out, '_') + } + } + s := strings.Trim(string(out), "_") + if s == "" { + return "MODEL_API_KEY" + } + return s + "_API_KEY" + } +} + +func extractEnvVarName(s string) (string, bool) { + s = strings.TrimSpace(s) + if !strings.HasPrefix(s, "${") || !strings.HasSuffix(s, "}") { + return "", false + } + body := strings.TrimSuffix(strings.TrimPrefix(s, "${"), "}") + if body == "" { + return "", false + } + if i := strings.Index(body, ":"); i > 0 { + body = body[:i] + } + return body, body != "" +} + // isEnvVarRef returns true if the value looks like an environment variable reference (${...}) func isEnvVarRef(s string) bool { return strings.Contains(s, "${") diff --git a/internal/openclaw/import_test.go b/internal/openclaw/import_test.go index c5e3c8a..6ae1352 100644 --- a/internal/openclaw/import_test.go +++ b/internal/openclaw/import_test.go @@ -28,6 +28,44 @@ func TestIsEnvVarRef(t *testing.T) { } } +func TestExtractEnvVarName(t *testing.T) { + tests := []struct { + in string + want string + wantOK bool + }{ + {"${OPENAI_API_KEY}", "OPENAI_API_KEY", true}, + {"${OPENAI_API_KEY:default}", "OPENAI_API_KEY", true}, + {"OPENAI_API_KEY", "", false}, + {"${}", "", false}, + } + + for _, tt := range tests { + got, ok := extractEnvVarName(tt.in) + if ok != tt.wantOK || got != tt.want { + t.Errorf("extractEnvVarName(%q) = (%q, %v), want (%q, %v)", tt.in, got, ok, tt.want, tt.wantOK) + } + } +} + +func TestDefaultProviderAPIKeyEnvVar(t *testing.T) { + tests := []struct { + provider string + want string + }{ + {"anthropic", "ANTHROPIC_API_KEY"}, + {"openai", "OPENAI_API_KEY"}, + {"ollama", "OLLAMA_API_KEY"}, + {"my-provider", "MY_PROVIDER_API_KEY"}, + } + + for _, tt := 
range tests { + if got := defaultProviderAPIKeyEnvVar(tt.provider); got != tt.want { + t.Errorf("defaultProviderAPIKeyEnvVar(%q) = %q, want %q", tt.provider, got, tt.want) + } + } +} + func TestSanitizeModelAPI(t *testing.T) { // All valid values should pass through unchanged valid := []string{ @@ -212,7 +250,6 @@ func TestTranslateToOverlayYAML_ProviderWithModels(t *testing.T) { "anthropic:\n enabled: true", "baseUrl: https://api.anthropic.com/v1", "api: anthropic-messages", - "apiKeyValue: sk-ant-test", "- id: claude-opus-4-6", "name: Claude Opus 4.6", } @@ -267,15 +304,20 @@ func TestTranslateToOverlayYAML_Channels(t *testing.T) { got := TranslateToOverlayYAML(result) checks := []string{ - "telegram:\n enabled: true\n botToken: 123456:ABC", - "discord:\n enabled: true\n botToken: MTIz...", - "slack:\n enabled: true\n botToken: xoxb-test\n appToken: xapp-test", + "telegram:\n enabled: true", + "discord:\n enabled: true", + "slack:\n enabled: true", } for _, check := range checks { if !strings.Contains(got, check) { t.Errorf("YAML missing %q, got:\n%s", check, got) } } + for _, unexpected := range []string{"botToken:", "appToken:"} { + if strings.Contains(got, unexpected) { + t.Errorf("YAML should not contain %q, got:\n%s", unexpected, got) + } + } } func TestTranslateToOverlayYAML_FullConfig(t *testing.T) { @@ -391,6 +433,9 @@ func TestDetectExistingConfigAt_ValidConfig(t *testing.T) { if p.API != "anthropic-messages" { t.Errorf("Provider.API = %q, want %q", p.API, "anthropic-messages") } + if p.APIKeyEnvVar != "ANTHROPIC_API_KEY" { + t.Errorf("Provider.APIKeyEnvVar = %q, want %q", p.APIKeyEnvVar, "ANTHROPIC_API_KEY") + } if len(p.Models) != 1 || p.Models[0].ID != "claude-opus-4-6" { t.Errorf("Provider.Models = %v", p.Models) } @@ -423,6 +468,9 @@ func TestDetectExistingConfigAt_EnvVarKeySkipped(t *testing.T) { if result.Providers[0].APIKey != "" { t.Errorf("Provider.APIKey = %q, want empty (env-var should be skipped)", result.Providers[0].APIKey) } + if 
result.Providers[0].APIKeyEnvVar != "OPENAI_API_KEY" { + t.Errorf("Provider.APIKeyEnvVar = %q, want OPENAI_API_KEY", result.Providers[0].APIKeyEnvVar) + } } func TestDetectExistingConfigAt_ChannelImport(t *testing.T) { diff --git a/internal/openclaw/openclaw.go b/internal/openclaw/openclaw.go index 52ba627..b122cbf 100644 --- a/internal/openclaw/openclaw.go +++ b/internal/openclaw/openclaw.go @@ -34,8 +34,10 @@ type CloudProviderInfo struct { } const ( - appName = "openclaw" - defaultDomain = "obol.stack" + appName = "openclaw" + defaultDomain = "obol.stack" + userSecretsFileName = "values-obol.secrets.json" + userSecretsK8sSecretRef = "openclaw-user-secrets" // chartVersion pins the openclaw Helm chart version from the obol repo. // renovate: datasource=helm depName=openclaw registryUrl=https://obolnetwork.github.io/helm-charts/ chartVersion = "0.1.0" @@ -163,8 +165,7 @@ func Onboard(cfg *config.Config, opts OnboardOptions) error { // Push cloud API key to llmspy if a cloud provider was selected if cloudProvider != nil { if llmErr := llm.ConfigureLLMSpy(cfg, cloudProvider.Name, cloudProvider.APIKey); llmErr != nil { - fmt.Printf("Warning: failed to configure llmspy: %v\n", llmErr) - fmt.Println("You can configure it later with: obol llm configure") + return fmt.Errorf("failed to configure llmspy: %w", llmErr) } } } @@ -177,7 +178,12 @@ func Onboard(cfg *config.Config, opts OnboardOptions) error { // Write Obol Stack overlay values (httpRoute, provider config, eRPC, skills) hostname := fmt.Sprintf("openclaw-%s.%s", id, defaultDomain) namespace := fmt.Sprintf("%s-%s", appName, id) - overlay := generateOverlayValues(hostname, imported) + secretData := collectSensitiveData(imported) + if err := writeUserSecretsFile(deploymentDir, secretData); err != nil { + os.RemoveAll(deploymentDir) + return fmt.Errorf("failed to write OpenClaw secrets metadata: %w", err) + } + overlay := generateOverlayValues(hostname, imported, len(secretData) > 0) if err := 
os.WriteFile(filepath.Join(deploymentDir, "values-obol.yaml"), []byte(overlay), 0644); err != nil { os.RemoveAll(deploymentDir) return fmt.Errorf("failed to write overlay values: %w", err) @@ -198,6 +204,9 @@ func Onboard(cfg *config.Config, opts OnboardOptions) error { fmt.Printf("\nFiles created:\n") fmt.Printf(" - values-obol.yaml Obol Stack overlay (httpRoute, providers, eRPC)\n") fmt.Printf(" - helmfile.yaml Deployment configuration (chart: obol/openclaw v%s)\n", chartVersion) + if len(secretData) > 0 { + fmt.Printf(" - %s Local secret values (used to create %s in-cluster)\n", userSecretsFileName, userSecretsK8sSecretRef) + } if opts.Sync { fmt.Printf("\nDeploying to cluster...\n\n") @@ -240,6 +249,11 @@ func doSync(cfg *config.Config, id string) error { if _, err := os.Stat(helmfileBinary); os.IsNotExist(err) { return fmt.Errorf("helmfile not found at %s", helmfileBinary) } + namespace := fmt.Sprintf("%s-%s", appName, id) + + if err := applyUserSecretsIfPresent(cfg, namespace, deploymentDir); err != nil { + return fmt.Errorf("failed to sync OpenClaw user secrets: %w", err) + } fmt.Printf("Syncing OpenClaw: %s/%s\n", appName, id) fmt.Printf("Deployment directory: %s\n", deploymentDir) @@ -258,7 +272,6 @@ func doSync(cfg *config.Config, id string) error { return fmt.Errorf("helmfile sync failed: %w", err) } - namespace := fmt.Sprintf("%s-%s", appName, id) hostname := fmt.Sprintf("openclaw-%s.%s", id, defaultDomain) fmt.Printf("\nāœ“ OpenClaw synced successfully!\n") fmt.Printf(" Namespace: %s\n", namespace) @@ -271,6 +284,101 @@ func doSync(cfg *config.Config, id string) error { return nil } +func writeUserSecretsFile(deploymentDir string, secretData map[string]string) error { + path := filepath.Join(deploymentDir, userSecretsFileName) + if len(secretData) == 0 { + if err := os.Remove(path); err != nil && !os.IsNotExist(err) { + return err + } + return nil + } + + payload, err := json.MarshalIndent(secretData, "", " ") + if err != nil { + return err + } + 
return os.WriteFile(path, payload, 0600) +} + +func loadUserSecretsFile(deploymentDir string) (map[string]string, error) { + path := filepath.Join(deploymentDir, userSecretsFileName) + data, err := os.ReadFile(path) + if err != nil { + if os.IsNotExist(err) { + return nil, nil + } + return nil, err + } + + var out map[string]string + if err := json.Unmarshal(data, &out); err != nil { + return nil, fmt.Errorf("invalid %s: %w", userSecretsFileName, err) + } + return out, nil +} + +func applyUserSecretsIfPresent(cfg *config.Config, namespace, deploymentDir string) error { + secretData, err := loadUserSecretsFile(deploymentDir) + if err != nil { + return err + } + if len(secretData) == 0 { + return nil + } + + kubeconfigPath := filepath.Join(cfg.ConfigDir, "kubeconfig.yaml") + kubectlBinary := filepath.Join(cfg.BinDir, "kubectl") + + if err := ensureNamespaceExists(kubectlBinary, kubeconfigPath, namespace); err != nil { + return err + } + + manifest := map[string]interface{}{ + "apiVersion": "v1", + "kind": "Secret", + "metadata": map[string]string{ + "name": userSecretsK8sSecretRef, + "namespace": namespace, + }, + "type": "Opaque", + "stringData": secretData, + } + raw, err := json.Marshal(manifest) + if err != nil { + return err + } + + cmd := exec.Command(kubectlBinary, "apply", "-f", "-") + cmd.Env = append(os.Environ(), fmt.Sprintf("KUBECONFIG=%s", kubeconfigPath)) + cmd.Stdin = bytes.NewReader(raw) + var stderr bytes.Buffer + cmd.Stderr = &stderr + if err := cmd.Run(); err != nil { + return fmt.Errorf("%w\n%s", err, stderr.String()) + } + return nil +} + +func ensureNamespaceExists(kubectlBinary, kubeconfigPath, namespace string) error { + getCmd := exec.Command(kubectlBinary, "get", "namespace", namespace) + getCmd.Env = append(os.Environ(), fmt.Sprintf("KUBECONFIG=%s", kubeconfigPath)) + if err := getCmd.Run(); err == nil { + return nil + } + + createCmd := exec.Command(kubectlBinary, "create", "namespace", namespace) + createCmd.Env = append(os.Environ(), 
fmt.Sprintf("KUBECONFIG=%s", kubeconfigPath)) + var stderr bytes.Buffer + createCmd.Stderr = &stderr + if err := createCmd.Run(); err != nil { + if strings.Contains(stderr.String(), "AlreadyExists") { + return nil + } + return fmt.Errorf("%w: %s", err, strings.TrimSpace(stderr.String())) + } + return nil +} + // copyWorkspaceToPod copies the local workspace directory into the OpenClaw pod's PVC. // This is non-fatal: failures print a warning and continue. func copyWorkspaceToPod(cfg *config.Config, id, workspaceDir string) { @@ -529,8 +637,7 @@ func Setup(cfg *config.Config, id string, _ SetupOptions) error { // Push cloud API key to llmspy if a cloud provider was selected if cloudProvider != nil { if llmErr := llm.ConfigureLLMSpy(cfg, cloudProvider.Name, cloudProvider.APIKey); llmErr != nil { - fmt.Printf("Warning: failed to configure llmspy: %v\n", llmErr) - fmt.Println("You can configure it later with: obol llm configure") + return fmt.Errorf("failed to configure llmspy: %w", llmErr) } } @@ -543,7 +650,11 @@ func Setup(cfg *config.Config, id string, _ SetupOptions) error { // Regenerate overlay values with the selected provider hostname := fmt.Sprintf("openclaw-%s.%s", id, defaultDomain) - overlay := generateOverlayValues(hostname, imported) + secretData := collectSensitiveData(imported) + if err := writeUserSecretsFile(deploymentDir, secretData); err != nil { + return fmt.Errorf("failed to write OpenClaw secrets metadata: %w", err) + } + overlay := generateOverlayValues(hostname, imported, len(secretData) > 0) overlayPath := filepath.Join(deploymentDir, "values-obol.yaml") if err := os.WriteFile(overlayPath, []byte(overlay), 0644); err != nil { return fmt.Errorf("failed to write overlay values: %w", err) @@ -827,9 +938,9 @@ func CLI(cfg *config.Config, id string, args []string) error { namespace := fmt.Sprintf("%s-%s", appName, id) if len(args) == 0 { - return fmt.Errorf("no openclaw command specified\n\nExamples:\n" + - " obol openclaw cli %s -- gateway 
health\n" + - " obol openclaw cli %s -- gateway call config.get\n" + + return fmt.Errorf("no openclaw command specified\n\nExamples:\n"+ + " obol openclaw cli %s -- gateway health\n"+ + " obol openclaw cli %s -- gateway call config.get\n"+ " obol openclaw cli %s -- doctor", id, id, id) } @@ -934,7 +1045,7 @@ func deploymentPath(cfg *config.Config, id string) string { // generateOverlayValues creates the Obol Stack-specific values overlay. // If imported is non-nil, provider/channel config from the import is used // instead of the default Ollama configuration. -func generateOverlayValues(hostname string, imported *ImportResult) string { +func generateOverlayValues(hostname string, imported *ImportResult, useExternalSecrets bool) string { var b strings.Builder b.WriteString(`# Obol Stack overlay values for OpenClaw @@ -1015,6 +1126,15 @@ initJob: enabled: false `) + if useExternalSecrets { + b.WriteString(` +# Load instance-local credentials (provider/channel tokens) from a dedicated Secret +secrets: + extraEnvFromSecrets: + - ` + userSecretsK8sSecretRef + ` +`) + } + return b.String() } @@ -1077,9 +1197,12 @@ func interactiveSetup(imported *ImportResult) (*ImportResult, *CloudProviderInfo if ollamaAvailable { fmt.Println("\nSelect a model provider:") - fmt.Println(" [1] Ollama (default, runs in-cluster)") - fmt.Println(" [2] OpenAI") - fmt.Println(" [3] Anthropic") + fmt.Println(" [1] Global Ollama via llmspy (default)") + fmt.Println(" [2] Global OpenAI via llmspy") + fmt.Println(" [3] Global Anthropic via llmspy") + fmt.Println(" [4] Direct OpenAI (instance override)") + fmt.Println(" [5] Direct Anthropic (instance override)") + fmt.Println(" [6] Custom OpenAI-compatible endpoint (instance override)") fmt.Print("\nChoice [1]: ") line, _ := reader.ReadString('\n') @@ -1090,7 +1213,7 @@ func interactiveSetup(imported *ImportResult) (*ImportResult, *CloudProviderInfo switch choice { case "1": - fmt.Println("Using Ollama (in-cluster) as default provider.") + 
fmt.Println("Using global Ollama route via llmspy.") return nil, nil, nil case "2": cloud, err := promptForCloudProvider(reader, "openai", "OpenAI", "gpt-5.2", "GPT-5.2") @@ -1106,16 +1229,37 @@ func interactiveSetup(imported *ImportResult) (*ImportResult, *CloudProviderInfo } result := buildLLMSpyRoutedOverlay(cloud) return result, cloud, nil + case "4": + result, err := promptForDirectProvider(reader, "openai", "OpenAI", "https://api.openai.com/v1", "openai-completions", "OPENAI_API_KEY", "gpt-5.2", "GPT-5.2") + if err != nil { + return nil, nil, err + } + return result, nil, nil + case "5": + result, err := promptForDirectProvider(reader, "anthropic", "Anthropic", "https://api.anthropic.com/v1", "anthropic-messages", "ANTHROPIC_API_KEY", "claude-opus-4-6", "Claude Opus 4.6") + if err != nil { + return nil, nil, err + } + return result, nil, nil + case "6": + result, err := promptForCustomProvider(reader) + if err != nil { + return nil, nil, err + } + return result, nil, nil default: - fmt.Printf("Unknown choice '%s', using Ollama defaults.\n", choice) + fmt.Printf("Unknown choice '%s', using global Ollama route.\n", choice) return nil, nil, nil } } - // Ollama not available — only offer cloud providers + // Ollama not available — offer cloud/global and direct overrides fmt.Println("\nSelect a model provider:") - fmt.Println(" [1] OpenAI") - fmt.Println(" [2] Anthropic") + fmt.Println(" [1] Global OpenAI via llmspy") + fmt.Println(" [2] Global Anthropic via llmspy") + fmt.Println(" [3] Direct OpenAI (instance override)") + fmt.Println(" [4] Direct Anthropic (instance override)") + fmt.Println(" [5] Custom OpenAI-compatible endpoint (instance override)") fmt.Print("\nChoice [1]: ") line, _ := reader.ReadString('\n') @@ -1139,6 +1283,24 @@ func interactiveSetup(imported *ImportResult) (*ImportResult, *CloudProviderInfo } result := buildLLMSpyRoutedOverlay(cloud) return result, cloud, nil + case "3": + result, err := promptForDirectProvider(reader, "openai", 
"OpenAI", "https://api.openai.com/v1", "openai-completions", "OPENAI_API_KEY", "gpt-5.2", "GPT-5.2") + if err != nil { + return nil, nil, err + } + return result, nil, nil + case "4": + result, err := promptForDirectProvider(reader, "anthropic", "Anthropic", "https://api.anthropic.com/v1", "anthropic-messages", "ANTHROPIC_API_KEY", "claude-opus-4-6", "Claude Opus 4.6") + if err != nil { + return nil, nil, err + } + return result, nil, nil + case "5": + result, err := promptForCustomProvider(reader) + if err != nil { + return nil, nil, err + } + return result, nil, nil default: return nil, nil, fmt.Errorf("unknown choice '%s'; please select a valid provider", choice) } @@ -1162,6 +1324,86 @@ func promptForCloudProvider(reader *bufio.Reader, name, display, modelID, modelN }, nil } +// promptForDirectProvider asks for direct-provider settings for an instance-local override. +func promptForDirectProvider(reader *bufio.Reader, providerName, display, defaultBaseURL, defaultAPI, defaultAPIKeyEnvVar, defaultModelID, defaultModelName string) (*ImportResult, error) { + fmt.Printf("\n%s API key (instance-local): ", display) + apiKey, _ := reader.ReadString('\n') + apiKey = strings.TrimSpace(apiKey) + if apiKey == "" { + return nil, fmt.Errorf("%s API key is required", display) + } + + fmt.Printf("%s model ID [%s]: ", display, defaultModelID) + modelID, _ := reader.ReadString('\n') + modelID = strings.TrimSpace(modelID) + if modelID == "" { + modelID = defaultModelID + } + + fmt.Printf("%s model display name [%s]: ", display, defaultModelName) + modelName, _ := reader.ReadString('\n') + modelName = strings.TrimSpace(modelName) + if modelName == "" { + modelName = defaultModelName + } + + fmt.Printf("%s base URL [%s]: ", display, defaultBaseURL) + baseURL, _ := reader.ReadString('\n') + baseURL = strings.TrimSpace(baseURL) + if baseURL == "" { + baseURL = defaultBaseURL + } + + return buildDirectProviderOverlay(providerName, baseURL, defaultAPI, defaultAPIKeyEnvVar, modelID, 
modelName, apiKey), nil +} + +// promptForCustomProvider asks for an OpenAI-compatible custom endpoint override. +func promptForCustomProvider(reader *bufio.Reader) (*ImportResult, error) { + fmt.Printf("\nCustom base URL (OpenAI-compatible, e.g. https://example.com/v1): ") + baseURL, _ := reader.ReadString('\n') + baseURL = strings.TrimSpace(baseURL) + if baseURL == "" { + return nil, fmt.Errorf("custom base URL is required") + } + + fmt.Printf("Custom model ID: ") + modelID, _ := reader.ReadString('\n') + modelID = strings.TrimSpace(modelID) + if modelID == "" { + return nil, fmt.Errorf("custom model ID is required") + } + + fmt.Printf("Custom model display name [%s]: ", modelID) + modelName, _ := reader.ReadString('\n') + modelName = strings.TrimSpace(modelName) + if modelName == "" { + modelName = modelID + } + + fmt.Printf("Custom API type [openai-completions]: ") + apiType, _ := reader.ReadString('\n') + apiType = strings.TrimSpace(apiType) + if apiType == "" { + apiType = "openai-completions" + } + + fmt.Printf("API key env var [OPENAI_API_KEY]: ") + apiKeyEnvVar, _ := reader.ReadString('\n') + apiKeyEnvVar = strings.TrimSpace(apiKeyEnvVar) + if apiKeyEnvVar == "" { + apiKeyEnvVar = "OPENAI_API_KEY" + } + + fmt.Printf("API key (optional, leave empty to configure later): ") + apiKey, _ := reader.ReadString('\n') + apiKey = strings.TrimSpace(apiKey) + if apiKey == "" { + fmt.Println(" Note: no API key provided; set it later via the OpenClaw user secret.") + } + + return buildDirectProviderOverlay("openai", baseURL, apiType, apiKeyEnvVar, modelID, modelName, apiKey), nil +} + // buildLLMSpyRoutedOverlay creates an ImportResult that routes a cloud model // through the llmspy proxy. OpenClaw sees an "ollama" provider pointing at the // cluster-wide llmspy gateway, with the cloud model in its model list. 
We reuse @@ -1190,6 +1432,90 @@ func buildLLMSpyRoutedOverlay(cloud *CloudProviderInfo) *ImportResult { } } +// buildDirectProviderOverlay creates an instance-local direct provider configuration. +// Provider name must be one of anthropic/openai/ollama due to current chart constraints. +func buildDirectProviderOverlay(providerName, baseURL, api, apiKeyEnvVar, modelID, modelName, apiKey string) *ImportResult { + var agentPrefix string + switch providerName { + case "anthropic": + agentPrefix = "anthropic" + case "openai": + agentPrefix = "openai" + default: + agentPrefix = providerName + } + + providers := []ImportedProvider{ + {Name: "anthropic", Disabled: providerName != "anthropic"}, + {Name: "openai", Disabled: providerName != "openai"}, + {Name: "ollama", Disabled: providerName != "ollama"}, + } + for i := range providers { + if providers[i].Name != providerName { + continue + } + providers[i].Disabled = false + providers[i].BaseURL = baseURL + providers[i].API = api + providers[i].APIKeyEnvVar = apiKeyEnvVar + providers[i].APIKey = apiKey + providers[i].Models = []ImportedModel{{ID: modelID, Name: modelName}} + } + + return &ImportResult{ + AgentModel: agentPrefix + "/" + modelID, + Providers: providers, + } +} + +// collectSensitiveData extracts literal secrets from imported config and strips +// them from the in-memory overlay data so values-obol.yaml does not persist them. 
+func collectSensitiveData(imported *ImportResult) map[string]string { + if imported == nil { + return nil + } + + secretData := make(map[string]string) + + for i := range imported.Providers { + p := &imported.Providers[i] + if p.APIKey == "" { + continue + } + envVar := p.APIKeyEnvVar + if envVar == "" { + envVar = defaultProviderAPIKeyEnvVar(p.Name) + p.APIKeyEnvVar = envVar + } + secretData[envVar] = p.APIKey + p.APIKey = "" + } + + if imported.Channels.Telegram != nil && imported.Channels.Telegram.BotToken != "" { + secretData["TELEGRAM_BOT_TOKEN"] = imported.Channels.Telegram.BotToken + imported.Channels.Telegram.BotToken = "" + } + if imported.Channels.Discord != nil && imported.Channels.Discord.BotToken != "" { + secretData["DISCORD_BOT_TOKEN"] = imported.Channels.Discord.BotToken + imported.Channels.Discord.BotToken = "" + } + if imported.Channels.Slack != nil { + if imported.Channels.Slack.BotToken != "" { + secretData["SLACK_BOT_TOKEN"] = imported.Channels.Slack.BotToken + imported.Channels.Slack.BotToken = "" + } + if imported.Channels.Slack.AppToken != "" { + secretData["SLACK_APP_TOKEN"] = imported.Channels.Slack.AppToken + imported.Channels.Slack.AppToken = "" + } + } + + if len(secretData) == 0 { + return nil + } + return secretData +} + // generateHelmfile creates a helmfile.yaml referencing the published obol/openclaw chart. 
func generateHelmfile(id, namespace string) string { return fmt.Sprintf(`# OpenClaw instance: %s diff --git a/internal/openclaw/overlay_test.go b/internal/openclaw/overlay_test.go index 33eeb9c..e645f4c 100644 --- a/internal/openclaw/overlay_test.go +++ b/internal/openclaw/overlay_test.go @@ -108,9 +108,9 @@ func TestOverlayYAML_LLMSpyRouted(t *testing.T) { t.Errorf("YAML missing apiKeyEnvVar, got:\n%s", yaml) } - // apiKeyValue should be ollama-local - if !strings.Contains(yaml, "apiKeyValue: ollama-local") { - t.Errorf("YAML missing apiKeyValue, got:\n%s", yaml) + // apiKeyValue should not be emitted; secrets are injected via env vars. + if strings.Contains(yaml, "apiKeyValue:") { + t.Errorf("YAML should not contain apiKeyValue literals, got:\n%s", yaml) } // api should be openai-completions (llmspy is OpenAI-compatible) @@ -134,7 +134,7 @@ func TestOverlayYAML_LLMSpyRouted(t *testing.T) { func TestGenerateOverlayValues_OllamaDefault(t *testing.T) { // When imported is nil, generateOverlayValues should use Ollama defaults - yaml := generateOverlayValues("openclaw-default.obol.stack", nil) + yaml := generateOverlayValues("openclaw-default.obol.stack", nil, false) if !strings.Contains(yaml, "agentModel: ollama/gpt-oss:120b-cloud") { t.Errorf("default overlay missing ollama agentModel, got:\n%s", yaml) @@ -144,6 +144,76 @@ func TestGenerateOverlayValues_OllamaDefault(t *testing.T) { } } +func TestGenerateOverlayValues_ExternalSecrets(t *testing.T) { + yaml := generateOverlayValues("openclaw-default.obol.stack", nil, true) + if !strings.Contains(yaml, "extraEnvFromSecrets") { + t.Errorf("overlay missing extraEnvFromSecrets, got:\n%s", yaml) + } + if !strings.Contains(yaml, "openclaw-user-secrets") { + t.Errorf("overlay missing external secret ref, got:\n%s", yaml) + } +} + +func TestCollectSensitiveData_StripsLiterals(t *testing.T) { + imported := &ImportResult{ + Providers: []ImportedProvider{ + { + Name: "openai", + APIKey: "sk-test", + APIKeyEnvVar: 
"OPENAI_API_KEY", + }, + }, + Channels: ImportedChannels{ + Telegram: &ImportedTelegram{BotToken: "tg-token"}, + }, + } + + data := collectSensitiveData(imported) + if data["OPENAI_API_KEY"] != "sk-test" { + t.Fatalf("missing OPENAI_API_KEY in extracted data: %+v", data) + } + if data["TELEGRAM_BOT_TOKEN"] != "tg-token" { + t.Fatalf("missing TELEGRAM_BOT_TOKEN in extracted data: %+v", data) + } + if imported.Providers[0].APIKey != "" { + t.Fatalf("provider API key was not stripped from overlay data") + } + if imported.Channels.Telegram.BotToken != "" { + t.Fatalf("telegram token was not stripped from overlay data") + } +} + +func TestBuildDirectProviderOverlay_OpenAI(t *testing.T) { + result := buildDirectProviderOverlay( + "openai", + "https://api.openai.com/v1", + "openai-completions", + "OPENAI_API_KEY", + "gpt-5.2", + "GPT-5.2", + "sk-open-test", + ) + + if result.AgentModel != "openai/gpt-5.2" { + t.Fatalf("AgentModel = %q, want openai/gpt-5.2", result.AgentModel) + } + foundEnabled := false + for _, p := range result.Providers { + if p.Name == "openai" { + foundEnabled = true + if p.Disabled { + t.Fatalf("openai provider should be enabled") + } + if p.APIKeyEnvVar != "OPENAI_API_KEY" { + t.Fatalf("openai APIKeyEnvVar = %q", p.APIKeyEnvVar) + } + } + } + if !foundEnabled { + t.Fatalf("openai provider not found in overlay") + } +} + func TestRemoteCapableCommands(t *testing.T) { // Commands that should go through port-forward remote := []string{"gateway", "acp", "browser", "logs"}