diff --git a/astrbot/core/astr_main_agent.py b/astrbot/core/astr_main_agent.py index 4e70f3d59..6a35c042f 100644 --- a/astrbot/core/astr_main_agent.py +++ b/astrbot/core/astr_main_agent.py @@ -21,18 +21,32 @@ from astrbot.core.astr_agent_run_util import AgentRunner from astrbot.core.astr_agent_tool_exec import FunctionToolExecutor from astrbot.core.astr_main_agent_resources import ( + ANNOTATE_EXECUTION_TOOL, + BROWSER_BATCH_EXEC_TOOL, + BROWSER_EXEC_TOOL, CHATUI_SPECIAL_DEFAULT_PERSONA_PROMPT, + CREATE_SKILL_CANDIDATE_TOOL, + CREATE_SKILL_PAYLOAD_TOOL, + EVALUATE_SKILL_CANDIDATE_TOOL, EXECUTE_SHELL_TOOL, FILE_DOWNLOAD_TOOL, FILE_UPLOAD_TOOL, + GET_EXECUTION_HISTORY_TOOL, + GET_SKILL_PAYLOAD_TOOL, KNOWLEDGE_BASE_QUERY_TOOL, + LIST_SKILL_CANDIDATES_TOOL, + LIST_SKILL_RELEASES_TOOL, LIVE_MODE_SYSTEM_PROMPT, LLM_SAFETY_MODE_SYSTEM_PROMPT, LOCAL_EXECUTE_SHELL_TOOL, LOCAL_PYTHON_TOOL, + PROMOTE_SKILL_CANDIDATE_TOOL, PYTHON_TOOL, + ROLLBACK_SKILL_RELEASE_TOOL, + RUN_BROWSER_SKILL_TOOL, SANDBOX_MODE_PROMPT, SEND_MESSAGE_TO_USER_TOOL, + SYNC_SKILL_RELEASE_TOOL, TOOL_CALL_PROMPT, TOOL_CALL_PROMPT_SKILLS_LIKE_MODE, retrieve_knowledge_base, @@ -781,9 +795,11 @@ def _apply_llm_safety_mode(config: MainAgentBuildConfig, req: ProviderRequest) - def _apply_sandbox_tools( config: MainAgentBuildConfig, req: ProviderRequest, session_id: str ) -> None: + _ = session_id if req.func_tool is None: req.func_tool = ToolSet() - if config.sandbox_cfg.get("booter") == "shipyard": + booter = config.sandbox_cfg.get("booter", "shipyard_neo") + if booter == "shipyard": ep = config.sandbox_cfg.get("shipyard_endpoint", "") at = config.sandbox_cfg.get("shipyard_access_token", "") if not ep or not at: @@ -791,10 +807,28 @@ def _apply_sandbox_tools( return os.environ["SHIPYARD_ENDPOINT"] = ep os.environ["SHIPYARD_ACCESS_TOKEN"] = at + req.func_tool.add_tool(EXECUTE_SHELL_TOOL) req.func_tool.add_tool(PYTHON_TOOL) req.func_tool.add_tool(FILE_UPLOAD_TOOL) req.func_tool.add_tool(FILE_DOWNLOAD_TOOL) + + if booter == "shipyard_neo": + req.func_tool.add_tool(BROWSER_EXEC_TOOL) + req.func_tool.add_tool(BROWSER_BATCH_EXEC_TOOL) + req.func_tool.add_tool(RUN_BROWSER_SKILL_TOOL) + req.func_tool.add_tool(GET_EXECUTION_HISTORY_TOOL) + req.func_tool.add_tool(ANNOTATE_EXECUTION_TOOL) + req.func_tool.add_tool(CREATE_SKILL_PAYLOAD_TOOL) + req.func_tool.add_tool(GET_SKILL_PAYLOAD_TOOL) + req.func_tool.add_tool(CREATE_SKILL_CANDIDATE_TOOL) + req.func_tool.add_tool(LIST_SKILL_CANDIDATES_TOOL) + req.func_tool.add_tool(EVALUATE_SKILL_CANDIDATE_TOOL) + req.func_tool.add_tool(PROMOTE_SKILL_CANDIDATE_TOOL) + req.func_tool.add_tool(LIST_SKILL_RELEASES_TOOL) + req.func_tool.add_tool(ROLLBACK_SKILL_RELEASE_TOOL) + req.func_tool.add_tool(SYNC_SKILL_RELEASE_TOOL) + req.system_prompt += f"\n{SANDBOX_MODE_PROMPT}\n" diff --git a/astrbot/core/astr_main_agent_resources.py b/astrbot/core/astr_main_agent_resources.py index 1d5c085ce..e04291faf 100644 --- a/astrbot/core/astr_main_agent_resources.py +++ b/astrbot/core/astr_main_agent_resources.py @@ -12,11 +12,25 @@ from astrbot.core.astr_agent_context import AstrAgentContext from astrbot.core.computer.computer_client import get_booter from astrbot.core.computer.tools import ( + AnnotateExecutionTool, + BrowserBatchExecTool, + BrowserExecTool, + CreateSkillCandidateTool, + CreateSkillPayloadTool, + EvaluateSkillCandidateTool, ExecuteShellTool, FileDownloadTool, FileUploadTool, + GetExecutionHistoryTool, + GetSkillPayloadTool, + ListSkillCandidatesTool, + ListSkillReleasesTool, LocalPythonTool, + 
PromoteSkillCandidateTool, PythonTool, + RollbackSkillReleaseTool, + RunBrowserSkillTool, + SyncSkillReleaseTool, ) from astrbot.core.message.message_event_result import MessageChain from astrbot.core.platform.message_session import MessageSession @@ -446,6 +460,20 @@ async def retrieve_knowledge_base( LOCAL_PYTHON_TOOL = LocalPythonTool() FILE_UPLOAD_TOOL = FileUploadTool() FILE_DOWNLOAD_TOOL = FileDownloadTool() +BROWSER_EXEC_TOOL = BrowserExecTool() +BROWSER_BATCH_EXEC_TOOL = BrowserBatchExecTool() +RUN_BROWSER_SKILL_TOOL = RunBrowserSkillTool() +GET_EXECUTION_HISTORY_TOOL = GetExecutionHistoryTool() +ANNOTATE_EXECUTION_TOOL = AnnotateExecutionTool() +CREATE_SKILL_PAYLOAD_TOOL = CreateSkillPayloadTool() +GET_SKILL_PAYLOAD_TOOL = GetSkillPayloadTool() +CREATE_SKILL_CANDIDATE_TOOL = CreateSkillCandidateTool() +LIST_SKILL_CANDIDATES_TOOL = ListSkillCandidatesTool() +EVALUATE_SKILL_CANDIDATE_TOOL = EvaluateSkillCandidateTool() +PROMOTE_SKILL_CANDIDATE_TOOL = PromoteSkillCandidateTool() +LIST_SKILL_RELEASES_TOOL = ListSkillReleasesTool() +ROLLBACK_SKILL_RELEASE_TOOL = RollbackSkillReleaseTool() +SYNC_SKILL_RELEASE_TOOL = SyncSkillReleaseTool() # we prevent astrbot from connecting to known malicious hosts # these hosts are base64 encoded diff --git a/astrbot/core/computer/booters/base.py b/astrbot/core/computer/booters/base.py index ea93a3d6d..d3f107450 100644 --- a/astrbot/core/computer/booters/base.py +++ b/astrbot/core/computer/booters/base.py @@ -1,4 +1,9 @@ -from ..olayer import FileSystemComponent, PythonComponent, ShellComponent +from ..olayer import ( + BrowserComponent, + FileSystemComponent, + PythonComponent, + ShellComponent, +) class ComputerBooter: @@ -11,6 +16,12 @@ def python(self) -> PythonComponent: ... @property def shell(self) -> ShellComponent: ... + @property + def browser(self) -> BrowserComponent: + raise NotImplementedError( + f"{self.__class__.__name__} does not support browser capability." + ) + async def boot(self, session_id: str) -> None: ... async def shutdown(self) -> None: ... diff --git a/astrbot/core/computer/booters/shipyard_neo.py b/astrbot/core/computer/booters/shipyard_neo.py new file mode 100644 index 000000000..1021c6df6 --- /dev/null +++ b/astrbot/core/computer/booters/shipyard_neo.py @@ -0,0 +1,368 @@ +from __future__ import annotations + +import os +import shlex +from typing import Any, cast + +from astrbot.api import logger + +from ..olayer import ( + BrowserComponent, + FileSystemComponent, + PythonComponent, + ShellComponent, +) +from .base import ComputerBooter + + +def _maybe_model_dump(value: Any) -> dict[str, Any]: + if isinstance(value, dict): + return value + if hasattr(value, "model_dump"): + dumped = value.model_dump() + if isinstance(dumped, dict): + return dumped + return {} + + +class NeoPythonComponent(PythonComponent): + def __init__(self, sandbox: Any) -> None: + self._sandbox = sandbox + + async def exec( + self, + code: str, + kernel_id: str | None = None, + timeout: int = 30, + silent: bool = False, + ) -> dict[str, Any]: + _ = kernel_id # Bay runtime does not expose kernel_id in current SDK. 
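# --- Illustrative sketch (not part of the patch) ------------------------------
# The _maybe_model_dump helper defined above normalizes Shipyard Neo SDK return
# values so the booter components can treat responses uniformly: plain dicts
# pass through, pydantic-style objects are converted via model_dump(), anything
# else becomes an empty dict. A minimal standalone check of that contract;
# FakeResult is a hypothetical stand-in for an SDK response object.

from typing import Any


def maybe_model_dump(value: Any) -> dict[str, Any]:
    if isinstance(value, dict):
        return value
    if hasattr(value, "model_dump"):
        dumped = value.model_dump()
        if isinstance(dumped, dict):
            return dumped
    return {}


class FakeResult:
    def model_dump(self) -> dict[str, Any]:
        return {"output": "hello", "success": True}


assert maybe_model_dump({"output": "hi"}) == {"output": "hi"}
assert maybe_model_dump(FakeResult()) == {"output": "hello", "success": True}
assert maybe_model_dump(42) == {}
# ------------------------------------------------------------------------------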
+ result = await self._sandbox.python.exec(code, timeout=timeout) + payload = _maybe_model_dump(result) + + output_text = payload.get("output", "") or "" + error_text = payload.get("error", "") or "" + data = payload.get("data") if isinstance(payload.get("data"), dict) else {} + rich_output = data.get("output") if isinstance(data.get("output"), dict) else {} + if not isinstance(rich_output.get("images"), list): + rich_output["images"] = [] + if "text" not in rich_output: + rich_output["text"] = output_text + + if silent: + rich_output["text"] = "" + + return { + "success": bool(payload.get("success", error_text == "")), + "data": { + "output": rich_output, + "error": error_text, + }, + "execution_id": payload.get("execution_id"), + "execution_time_ms": payload.get("execution_time_ms"), + "code": payload.get("code"), + "output": output_text, + "error": error_text, + } + + +class NeoShellComponent(ShellComponent): + def __init__(self, sandbox: Any) -> None: + self._sandbox = sandbox + + async def exec( + self, + command: str, + cwd: str | None = None, + env: dict[str, str] | None = None, + timeout: int | None = 30, + shell: bool = True, + background: bool = False, + ) -> dict[str, Any]: + if not shell: + return { + "stdout": "", + "stderr": "error: only shell mode is supported in shipyard_neo booter.", + "exit_code": 2, + "success": False, + } + + run_command = command + if env: + env_prefix = " ".join( + f"{k}={shlex.quote(str(v))}" for k, v in sorted(env.items()) + ) + run_command = f"{env_prefix} {run_command}" + + if background: + run_command = f"nohup sh -lc {shlex.quote(run_command)} >/tmp/astrbot_bg.log 2>&1 & echo $!" + + result = await self._sandbox.shell.exec( + run_command, + timeout=timeout or 30, + cwd=cwd, + ) + payload = _maybe_model_dump(result) + + stdout = payload.get("output", "") or "" + stderr = payload.get("error", "") or "" + exit_code = payload.get("exit_code") + if background: + pid: int | None = None + try: + pid = int(stdout.strip().splitlines()[-1]) + except Exception: + pid = None + return { + "pid": pid, + "stdout": stdout, + "stderr": stderr, + "exit_code": exit_code, + "success": bool(payload.get("success", not stderr)), + "execution_id": payload.get("execution_id"), + "execution_time_ms": payload.get("execution_time_ms"), + "command": payload.get("command"), + } + + return { + "stdout": stdout, + "stderr": stderr, + "exit_code": exit_code, + "success": bool(payload.get("success", not stderr)), + "execution_id": payload.get("execution_id"), + "execution_time_ms": payload.get("execution_time_ms"), + "command": payload.get("command"), + } + + +class NeoFileSystemComponent(FileSystemComponent): + def __init__(self, sandbox: Any) -> None: + self._sandbox = sandbox + + async def create_file( + self, + path: str, + content: str = "", + mode: int = 0o644, + ) -> dict[str, Any]: + _ = mode + await self._sandbox.filesystem.write_file(path, content) + return {"success": True, "path": path} + + async def read_file(self, path: str, encoding: str = "utf-8") -> dict[str, Any]: + _ = encoding + content = await self._sandbox.filesystem.read_file(path) + return {"success": True, "path": path, "content": content} + + async def write_file( + self, + path: str, + content: str, + mode: str = "w", + encoding: str = "utf-8", + ) -> dict[str, Any]: + _ = mode + _ = encoding + await self._sandbox.filesystem.write_file(path, content) + return {"success": True, "path": path} + + async def delete_file(self, path: str) -> dict[str, Any]: + await self._sandbox.filesystem.delete(path) + 
return {"success": True, "path": path} + + async def list_dir( + self, + path: str = ".", + show_hidden: bool = False, + ) -> dict[str, Any]: + entries = await self._sandbox.filesystem.list_dir(path) + data = [] + for entry in entries: + item = _maybe_model_dump(entry) + if not show_hidden and str(item.get("name", "")).startswith("."): + continue + data.append(item) + return {"success": True, "path": path, "entries": data} + + +class NeoBrowserComponent(BrowserComponent): + def __init__(self, sandbox: Any) -> None: + self._sandbox = sandbox + + async def exec( + self, + cmd: str, + timeout: int = 30, + description: str | None = None, + tags: str | None = None, + learn: bool = False, + include_trace: bool = False, + ) -> dict[str, Any]: + result = await self._sandbox.browser.exec( + cmd, + timeout=timeout, + description=description, + tags=tags, + learn=learn, + include_trace=include_trace, + ) + return _maybe_model_dump(result) + + async def exec_batch( + self, + commands: list[str], + timeout: int = 60, + stop_on_error: bool = True, + description: str | None = None, + tags: str | None = None, + learn: bool = False, + include_trace: bool = False, + ) -> dict[str, Any]: + result = await self._sandbox.browser.exec_batch( + commands, + timeout=timeout, + stop_on_error=stop_on_error, + description=description, + tags=tags, + learn=learn, + include_trace=include_trace, + ) + return _maybe_model_dump(result) + + async def run_skill( + self, + skill_key: str, + timeout: int = 60, + stop_on_error: bool = True, + include_trace: bool = False, + description: str | None = None, + tags: str | None = None, + ) -> dict[str, Any]: + result = await self._sandbox.browser.run_skill( + skill_key=skill_key, + timeout=timeout, + stop_on_error=stop_on_error, + include_trace=include_trace, + description=description, + tags=tags, + ) + return _maybe_model_dump(result) + + +class ShipyardNeoBooter(ComputerBooter): + def __init__( + self, + endpoint_url: str, + access_token: str, + profile: str = "python-default", + ttl: int = 3600, + ) -> None: + self._endpoint_url = endpoint_url + self._access_token = access_token + self._profile = profile + self._ttl = ttl + self._client: Any = None + self._sandbox: Any = None + self._fs: FileSystemComponent | None = None + self._python: PythonComponent | None = None + self._shell: ShellComponent | None = None + self._browser: BrowserComponent | None = None + + @property + def bay_client(self) -> Any: + return self._client + + @property + def sandbox(self) -> Any: + return self._sandbox + + async def boot(self, session_id: str) -> None: + _ = session_id + if not self._endpoint_url or not self._access_token: + raise ValueError("Shipyard Neo sandbox configuration is incomplete.") + + from shipyard_neo import BayClient + + self._client = BayClient( + endpoint_url=self._endpoint_url, + access_token=self._access_token, + ) + await self._client.__aenter__() + self._sandbox = await self._client.create_sandbox( + profile=self._profile or "python-default", + ttl=self._ttl, + ) + + self._fs = NeoFileSystemComponent(self._sandbox) + self._python = NeoPythonComponent(self._sandbox) + self._shell = NeoShellComponent(self._sandbox) + self._browser = NeoBrowserComponent(self._sandbox) + logger.info( + "Got Shipyard Neo sandbox: %s (profile=%s)", + self._sandbox.id, + self._profile or "python-default", + ) + + async def shutdown(self) -> None: + if self._client is not None: + await self._client.__aexit__(None, None, None) + self._client = None + self._sandbox = None + + @property + def fs(self) 
-> FileSystemComponent: + if self._fs is None: + raise RuntimeError("ShipyardNeoBooter is not initialized.") + return self._fs + + @property + def python(self) -> PythonComponent: + if self._python is None: + raise RuntimeError("ShipyardNeoBooter is not initialized.") + return self._python + + @property + def shell(self) -> ShellComponent: + if self._shell is None: + raise RuntimeError("ShipyardNeoBooter is not initialized.") + return self._shell + + @property + def browser(self) -> BrowserComponent: + if self._browser is None: + raise RuntimeError("ShipyardNeoBooter is not initialized.") + return self._browser + + async def upload_file(self, path: str, file_name: str) -> dict: + if self._sandbox is None: + raise RuntimeError("ShipyardNeoBooter is not initialized.") + with open(path, "rb") as f: + content = f.read() + remote_path = file_name.lstrip("/") + await self._sandbox.filesystem.upload(remote_path, content) + return { + "success": True, + "message": "File uploaded successfully", + "file_path": remote_path, + } + + async def download_file(self, remote_path: str, local_path: str) -> None: + if self._sandbox is None: + raise RuntimeError("ShipyardNeoBooter is not initialized.") + content = await self._sandbox.filesystem.download(remote_path.lstrip("/")) + local_dir = os.path.dirname(local_path) + if local_dir: + os.makedirs(local_dir, exist_ok=True) + with open(local_path, "wb") as f: + f.write(cast(bytes, content)) + + async def available(self) -> bool: + if self._sandbox is None: + return False + try: + await self._sandbox.refresh() + status = getattr(self._sandbox.status, "value", str(self._sandbox.status)) + return status not in {"failed", "expired"} + except Exception as e: + logger.error(f"Error checking Shipyard Neo sandbox availability: {e}") + return False diff --git a/astrbot/core/computer/computer_client.py b/astrbot/core/computer/computer_client.py index 9750e7b64..bf698b941 100644 --- a/astrbot/core/computer/computer_client.py +++ b/astrbot/core/computer/computer_client.py @@ -1,10 +1,10 @@ -import os +import json import shutil import uuid from pathlib import Path from astrbot.api import logger -from astrbot.core.skills.skill_manager import SANDBOX_SKILLS_ROOT +from astrbot.core.skills.skill_manager import SANDBOX_SKILLS_ROOT, SkillManager from astrbot.core.star.context import Context from astrbot.core.utils.astrbot_path import ( get_astrbot_skills_path, @@ -16,45 +16,225 @@ session_booter: dict[str, ComputerBooter] = {} local_booter: ComputerBooter | None = None +_MANAGED_SKILLS_FILE = ".astrbot_managed_skills.json" -async def _sync_skills_to_sandbox(booter: ComputerBooter) -> None: - skills_root = get_astrbot_skills_path() - if not os.path.isdir(skills_root): - return - if not any(Path(skills_root).iterdir()): +def _list_local_skill_dirs(skills_root: Path) -> list[Path]: + skills: list[Path] = [] + for entry in sorted(skills_root.iterdir()): + if not entry.is_dir(): + continue + skill_md = entry / "SKILL.md" + if skill_md.exists(): + skills.append(entry) + return skills + + +def _build_sync_and_scan_command() -> str: + script = f""" +import json +import shutil +import zipfile +from pathlib import Path + +root = Path({SANDBOX_SKILLS_ROOT!r}) +zip_path = root / "skills.zip" +tmp_extract = Path(f"{{root}}_tmp_extract") +managed_file = root / {_MANAGED_SKILLS_FILE!r} + + +def parse_description(text: str) -> str: + if not text.startswith("---"): + return "" + lines = text.splitlines() + if not lines or lines[0].strip() != "---": + return "" + end_idx = None + for i in 
range(1, len(lines)): + if lines[i].strip() == "---": + end_idx = i + break + if end_idx is None: + return "" + for line in lines[1:end_idx]: + if ":" not in line: + continue + key, value = line.split(":", 1) + if key.strip().lower() == "description": + return value.strip().strip('"').strip("'") + return "" + + +def remove_tree(path: Path) -> None: + if not path.exists(): return + if path.is_dir(): + shutil.rmtree(path, ignore_errors=True) + else: + path.unlink(missing_ok=True) - temp_dir = get_astrbot_temp_path() - os.makedirs(temp_dir, exist_ok=True) - zip_base = os.path.join(temp_dir, "skills_bundle") - zip_path = f"{zip_base}.zip" +def load_managed_skills() -> list[str]: + if not managed_file.exists(): + return [] try: - if os.path.exists(zip_path): - os.remove(zip_path) - shutil.make_archive(zip_base, "zip", skills_root) - remote_zip = Path(SANDBOX_SKILLS_ROOT) / "skills.zip" - logger.info("Uploading skills bundle to sandbox...") - await booter.shell.exec(f"mkdir -p {SANDBOX_SKILLS_ROOT}") - upload_result = await booter.upload_file(zip_path, str(remote_zip)) - if not upload_result.get("success", False): - raise RuntimeError("Failed to upload skills bundle to sandbox.") - # Use -n flag to never overwrite existing files, fallback to Python if unzip unavailable - await booter.shell.exec( - f"unzip -n {remote_zip} -d {SANDBOX_SKILLS_ROOT} || " - f"python3 -c \"import zipfile, os, pathlib; z=zipfile.ZipFile('{remote_zip}'); " - f"[z.extract(m, '{SANDBOX_SKILLS_ROOT}') for m in z.namelist() " - f"if not os.path.exists(os.path.join('{SANDBOX_SKILLS_ROOT}', m))]\" || " - f"python -c \"import zipfile, os, pathlib; z=zipfile.ZipFile('{remote_zip}'); " - f"[z.extract(m, '{SANDBOX_SKILLS_ROOT}') for m in z.namelist() " - f"if not os.path.exists(os.path.join('{SANDBOX_SKILLS_ROOT}', m))]\"; " - f"rm -f {remote_zip}" + payload = json.loads(managed_file.read_text(encoding="utf-8")) + except Exception: + return [] + if not isinstance(payload, dict): + return [] + items = payload.get("managed_skills", []) + if not isinstance(items, list): + return [] + result: list[str] = [] + for item in items: + if isinstance(item, str) and item.strip(): + result.append(item.strip()) + return result + + +def collect_skills() -> list[dict[str, str]]: + skills: list[dict[str, str]] = [] + if not root.exists(): + return skills + for skill_dir in sorted(root.iterdir()): + if not skill_dir.is_dir(): + continue + skill_md = skill_dir / "SKILL.md" + if not skill_md.is_file(): + continue + description = "" + try: + text = skill_md.read_text(encoding="utf-8") + description = parse_description(text) + except Exception: + description = "" + skills.append( + {{ + "name": skill_dir.name, + "description": description, + "path": f"{SANDBOX_SKILLS_ROOT}/{{skill_dir.name}}/SKILL.md", + }} ) + return skills + + +root.mkdir(parents=True, exist_ok=True) +for managed_name in load_managed_skills(): + remove_tree(root / managed_name) + +current_managed: list[str] = [] +if zip_path.exists(): + remove_tree(tmp_extract) + tmp_extract.mkdir(parents=True, exist_ok=True) + with zipfile.ZipFile(zip_path) as zf: + zf.extractall(tmp_extract) + for entry in sorted(tmp_extract.iterdir()): + if not entry.is_dir(): + continue + target = root / entry.name + remove_tree(target) + shutil.copytree(entry, target) + current_managed.append(entry.name) + +remove_tree(tmp_extract) +remove_tree(zip_path) +managed_file.write_text( + json.dumps({{"managed_skills": current_managed}}, ensure_ascii=False, indent=2), + encoding="utf-8", +) +print( + json.dumps( + {{ 
+ "managed_skills": current_managed, + "skills": collect_skills(), + }}, + ensure_ascii=False, + ) +) +""".strip() + return ( + "if command -v python3 >/dev/null 2>&1; then PYBIN=python3; " + "elif command -v python >/dev/null 2>&1; then PYBIN=python; " + "else echo 'python not found in sandbox' >&2; exit 127; fi; " + "$PYBIN - <<'PY'\n" + f"{script}\n" + "PY" + ) + + +def _shell_exec_succeeded(result: dict) -> bool: + if "success" in result: + return bool(result.get("success")) + exit_code = result.get("exit_code") + return exit_code in (0, None) + + +def _decode_sync_payload(stdout: str) -> dict | None: + text = stdout.strip() + if not text: + return None + candidates = [text] + candidates.extend([line.strip() for line in text.splitlines() if line.strip()]) + for candidate in reversed(candidates): + try: + payload = json.loads(candidate) + except Exception: + continue + if isinstance(payload, dict): + return payload + return None + + +def _update_sandbox_skills_cache(payload: dict | None) -> None: + if not isinstance(payload, dict): + return + skills = payload.get("skills", []) + if not isinstance(skills, list): + return + SkillManager().set_sandbox_skills_cache(skills) + + +async def _sync_skills_to_sandbox(booter: ComputerBooter) -> None: + skills_root = Path(get_astrbot_skills_path()) + if not skills_root.is_dir(): + return + local_skill_dirs = _list_local_skill_dirs(skills_root) + + temp_dir = Path(get_astrbot_temp_path()) + temp_dir.mkdir(parents=True, exist_ok=True) + zip_base = temp_dir / "skills_bundle" + zip_path = zip_base.with_suffix(".zip") + + try: + if local_skill_dirs: + if zip_path.exists(): + zip_path.unlink() + shutil.make_archive(str(zip_base), "zip", str(skills_root)) + remote_zip = Path(SANDBOX_SKILLS_ROOT) / "skills.zip" + logger.info("Uploading skills bundle to sandbox...") + await booter.shell.exec(f"mkdir -p {SANDBOX_SKILLS_ROOT}") + upload_result = await booter.upload_file(str(zip_path), str(remote_zip)) + if not upload_result.get("success", False): + raise RuntimeError("Failed to upload skills bundle to sandbox.") + else: + logger.info( + "No local skills found. Keeping sandbox built-ins and refreshing metadata." 
+ ) + await booter.shell.exec(f"rm -f {SANDBOX_SKILLS_ROOT}/skills.zip") + + sync_result = await booter.shell.exec(_build_sync_and_scan_command()) + if not _shell_exec_succeeded(sync_result): + raise RuntimeError( + "Failed to apply sandbox skill sync strategy: " + f"stderr={sync_result.get('stderr', '')}" + ) + payload = _decode_sync_payload(str(sync_result.get("stdout", "") or "")) + _update_sandbox_skills_cache(payload) finally: - if os.path.exists(zip_path): + if zip_path.exists(): try: - os.remove(zip_path) + zip_path.unlink() except Exception: logger.warning(f"Failed to remove temp skills zip: {zip_path}") @@ -66,7 +246,7 @@ async def get_booter( config = context.get_config(umo=session_id) sandbox_cfg = config.get("provider_settings", {}).get("sandbox", {}) - booter_type = sandbox_cfg.get("booter", "shipyard") + booter_type = sandbox_cfg.get("booter", "shipyard_neo") if session_id in session_booter: booter = session_booter[session_id] @@ -86,6 +266,19 @@ async def get_booter( client = ShipyardBooter( endpoint_url=ep, access_token=token, ttl=ttl, session_num=max_sessions ) + elif booter_type == "shipyard_neo": + from .booters.shipyard_neo import ShipyardNeoBooter + + ep = sandbox_cfg.get("shipyard_neo_endpoint", "") + token = sandbox_cfg.get("shipyard_neo_access_token", "") + ttl = sandbox_cfg.get("shipyard_neo_ttl", 3600) + profile = sandbox_cfg.get("shipyard_neo_profile", "python-default") + client = ShipyardNeoBooter( + endpoint_url=ep, + access_token=token, + profile=profile, + ttl=ttl, + ) elif booter_type == "boxlite": from .booters.boxlite import BoxliteBooter @@ -104,6 +297,21 @@ async def get_booter( return session_booter[session_id] +async def sync_skills_to_active_sandboxes() -> None: + """Best-effort skills synchronization for all active sandbox sessions.""" + for session_id, booter in list(session_booter.items()): + try: + if not await booter.available(): + continue + await _sync_skills_to_sandbox(booter) + except Exception as e: + logger.warning( + "Failed to sync skills to sandbox for session %s: %s", + session_id, + e, + ) + + def get_local_booter() -> ComputerBooter: global local_booter if local_booter is None: diff --git a/astrbot/core/computer/olayer/__init__.py b/astrbot/core/computer/olayer/__init__.py index f099c079a..e2348671e 100644 --- a/astrbot/core/computer/olayer/__init__.py +++ b/astrbot/core/computer/olayer/__init__.py @@ -1,5 +1,11 @@ +from .browser import BrowserComponent from .filesystem import FileSystemComponent from .python import PythonComponent from .shell import ShellComponent -__all__ = ["PythonComponent", "ShellComponent", "FileSystemComponent"] +__all__ = [ + "PythonComponent", + "ShellComponent", + "FileSystemComponent", + "BrowserComponent", +] diff --git a/astrbot/core/computer/olayer/browser.py b/astrbot/core/computer/olayer/browser.py new file mode 100644 index 000000000..aa69f4501 --- /dev/null +++ b/astrbot/core/computer/olayer/browser.py @@ -0,0 +1,46 @@ +""" +Browser automation component +""" + +from typing import Any, Protocol + + +class BrowserComponent(Protocol): + """Browser operations component""" + + async def exec( + self, + cmd: str, + timeout: int = 30, + description: str | None = None, + tags: str | None = None, + learn: bool = False, + include_trace: bool = False, + ) -> dict[str, Any]: + """Execute a browser automation command""" + ... 
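# --- Illustrative sketch (not part of the patch) ------------------------------
# get_booter() earlier in this hunk selects the booter from
# provider_settings.sandbox and, for the new shipyard_neo branch, reads the four
# shipyard_neo_* keys shown here. Endpoint and token values below are
# placeholders; the other values mirror the defaults added in
# astrbot/core/config/default.py.

sandbox_cfg_example = {
    "booter": "shipyard_neo",
    "shipyard_neo_endpoint": "https://bay.example.internal",  # placeholder
    "shipyard_neo_access_token": "<access-token>",            # placeholder
    "shipyard_neo_profile": "python-default",
    "shipyard_neo_ttl": 3600,
}
# With this config, get_booter() constructs
# ShipyardNeoBooter(endpoint_url=..., access_token=..., profile=..., ttl=...)
# and caches it per session_id in session_booter.
# ------------------------------------------------------------------------------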
+ + async def exec_batch( + self, + commands: list[str], + timeout: int = 60, + stop_on_error: bool = True, + description: str | None = None, + tags: str | None = None, + learn: bool = False, + include_trace: bool = False, + ) -> dict[str, Any]: + """Execute a browser automation command batch""" + ... + + async def run_skill( + self, + skill_key: str, + timeout: int = 60, + stop_on_error: bool = True, + include_trace: bool = False, + description: str | None = None, + tags: str | None = None, + ) -> dict[str, Any]: + """Run a browser skill by skill key""" + ... diff --git a/astrbot/core/computer/tools/__init__.py b/astrbot/core/computer/tools/__init__.py index 79994fb9b..598abbb6e 100644 --- a/astrbot/core/computer/tools/__init__.py +++ b/astrbot/core/computer/tools/__init__.py @@ -1,8 +1,36 @@ +from .browser import BrowserBatchExecTool, BrowserExecTool, RunBrowserSkillTool from .fs import FileDownloadTool, FileUploadTool +from .neo_skills import ( + AnnotateExecutionTool, + CreateSkillCandidateTool, + CreateSkillPayloadTool, + EvaluateSkillCandidateTool, + GetExecutionHistoryTool, + GetSkillPayloadTool, + ListSkillCandidatesTool, + ListSkillReleasesTool, + PromoteSkillCandidateTool, + RollbackSkillReleaseTool, + SyncSkillReleaseTool, +) from .python import LocalPythonTool, PythonTool from .shell import ExecuteShellTool __all__ = [ + "BrowserExecTool", + "BrowserBatchExecTool", + "RunBrowserSkillTool", + "GetExecutionHistoryTool", + "AnnotateExecutionTool", + "CreateSkillPayloadTool", + "GetSkillPayloadTool", + "CreateSkillCandidateTool", + "ListSkillCandidatesTool", + "EvaluateSkillCandidateTool", + "PromoteSkillCandidateTool", + "ListSkillReleasesTool", + "RollbackSkillReleaseTool", + "SyncSkillReleaseTool", "FileUploadTool", "PythonTool", "LocalPythonTool", diff --git a/astrbot/core/computer/tools/browser.py b/astrbot/core/computer/tools/browser.py new file mode 100644 index 000000000..70061ac31 --- /dev/null +++ b/astrbot/core/computer/tools/browser.py @@ -0,0 +1,204 @@ +import json +from dataclasses import dataclass, field +from typing import Any + +from astrbot.api import FunctionTool +from astrbot.core.agent.run_context import ContextWrapper +from astrbot.core.agent.tool import ToolExecResult +from astrbot.core.astr_agent_context import AstrAgentContext + +from ..computer_client import get_booter + + +def _to_json(data: Any) -> str: + return json.dumps(data, ensure_ascii=False, default=str) + + +def _ensure_admin(context: ContextWrapper[AstrAgentContext]) -> str | None: + if context.context.event.role != "admin": + return ( + "error: Permission denied. Browser and skill lifecycle tools are only allowed " + "for admin users." + ) + return None + + +async def _get_browser_component(context: ContextWrapper[AstrAgentContext]) -> Any: + booter = await get_booter( + context.context.context, + context.context.event.unified_msg_origin, + ) + browser = getattr(booter, "browser", None) + if browser is None: + raise RuntimeError( + "Current sandbox booter does not support browser capability. " + "Please switch to shipyard_neo." + ) + return browser + + +@dataclass +class BrowserExecTool(FunctionTool): + name: str = "astrbot_execute_browser" + description: str = "Execute one browser automation command in the sandbox." 
+ parameters: dict = field( + default_factory=lambda: { + "type": "object", + "properties": { + "cmd": {"type": "string", "description": "Browser command to execute."}, + "timeout": {"type": "integer", "default": 30}, + "description": { + "type": "string", + "description": "Optional execution description.", + }, + "tags": {"type": "string", "description": "Optional tags."}, + "learn": { + "type": "boolean", + "description": "Whether to mark execution as learn evidence.", + "default": False, + }, + "include_trace": { + "type": "boolean", + "description": "Whether to include trace_ref in response.", + "default": False, + }, + }, + "required": ["cmd"], + } + ) + + async def call( + self, + context: ContextWrapper[AstrAgentContext], + cmd: str, + timeout: int = 30, + description: str | None = None, + tags: str | None = None, + learn: bool = False, + include_trace: bool = False, + ) -> ToolExecResult: + if err := _ensure_admin(context): + return err + try: + browser = await _get_browser_component(context) + result = await browser.exec( + cmd=cmd, + timeout=timeout, + description=description, + tags=tags, + learn=learn, + include_trace=include_trace, + ) + return _to_json(result) + except Exception as e: + return f"Error executing browser command: {str(e)}" + + +@dataclass +class BrowserBatchExecTool(FunctionTool): + name: str = "astrbot_execute_browser_batch" + description: str = "Execute a browser command batch in the sandbox." + parameters: dict = field( + default_factory=lambda: { + "type": "object", + "properties": { + "commands": { + "type": "array", + "items": {"type": "string"}, + "description": "Ordered browser commands.", + }, + "timeout": {"type": "integer", "default": 60}, + "stop_on_error": {"type": "boolean", "default": True}, + "description": { + "type": "string", + "description": "Optional execution description.", + }, + "tags": {"type": "string", "description": "Optional tags."}, + "learn": { + "type": "boolean", + "description": "Whether to mark execution as learn evidence.", + "default": False, + }, + "include_trace": { + "type": "boolean", + "description": "Whether to include trace_ref in response.", + "default": False, + }, + }, + "required": ["commands"], + } + ) + + async def call( + self, + context: ContextWrapper[AstrAgentContext], + commands: list[str], + timeout: int = 60, + stop_on_error: bool = True, + description: str | None = None, + tags: str | None = None, + learn: bool = False, + include_trace: bool = False, + ) -> ToolExecResult: + if err := _ensure_admin(context): + return err + try: + browser = await _get_browser_component(context) + result = await browser.exec_batch( + commands=commands, + timeout=timeout, + stop_on_error=stop_on_error, + description=description, + tags=tags, + learn=learn, + include_trace=include_trace, + ) + return _to_json(result) + except Exception as e: + return f"Error executing browser batch command: {str(e)}" + + +@dataclass +class RunBrowserSkillTool(FunctionTool): + name: str = "astrbot_run_browser_skill" + description: str = "Run a released browser skill in the sandbox by skill_key." 
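# --- Illustrative sketch (not part of the patch) ------------------------------
# The parameters blocks in these tools are plain JSON Schema, so a function
# call emitted by the model for astrbot_execute_browser_batch is just a JSON
# object matching that schema. The command strings below are placeholders: the
# actual browser command grammar is defined by the Shipyard Neo runtime, not by
# this patch.

example_batch_call_args = {
    "commands": [
        "goto https://example.com",  # placeholder command strings
        "screenshot",
    ],
    "timeout": 60,
    "stop_on_error": True,
    "description": "Open example.com and capture a screenshot",
    "tags": "demo,smoke",
    "learn": False,
    "include_trace": False,
}
# BrowserBatchExecTool.call(context, **example_batch_call_args) forwards these
# to booter.browser.exec_batch() and returns the result serialized via _to_json.
# ------------------------------------------------------------------------------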
+ parameters: dict = field( + default_factory=lambda: { + "type": "object", + "properties": { + "skill_key": {"type": "string"}, + "timeout": {"type": "integer", "default": 60}, + "stop_on_error": {"type": "boolean", "default": True}, + "include_trace": {"type": "boolean", "default": False}, + "description": {"type": "string"}, + "tags": {"type": "string"}, + }, + "required": ["skill_key"], + } + ) + + async def call( + self, + context: ContextWrapper[AstrAgentContext], + skill_key: str, + timeout: int = 60, + stop_on_error: bool = True, + include_trace: bool = False, + description: str | None = None, + tags: str | None = None, + ) -> ToolExecResult: + if err := _ensure_admin(context): + return err + try: + browser = await _get_browser_component(context) + result = await browser.run_skill( + skill_key=skill_key, + timeout=timeout, + stop_on_error=stop_on_error, + include_trace=include_trace, + description=description, + tags=tags, + ) + return _to_json(result) + except Exception as e: + return f"Error running browser skill: {str(e)}" diff --git a/astrbot/core/computer/tools/neo_skills.py b/astrbot/core/computer/tools/neo_skills.py new file mode 100644 index 000000000..440538451 --- /dev/null +++ b/astrbot/core/computer/tools/neo_skills.py @@ -0,0 +1,545 @@ +import json +from dataclasses import dataclass, field +from typing import Any + +from astrbot.api import FunctionTool +from astrbot.core.agent.run_context import ContextWrapper +from astrbot.core.agent.tool import ToolExecResult +from astrbot.core.astr_agent_context import AstrAgentContext +from astrbot.core.skills.neo_skill_sync import NeoSkillSyncManager + +from ..computer_client import get_booter + + +def _to_jsonable(model_like: Any) -> Any: + if isinstance(model_like, dict): + return model_like + if isinstance(model_like, list): + return [_to_jsonable(i) for i in model_like] + if hasattr(model_like, "model_dump"): + return _to_jsonable(model_like.model_dump()) + return model_like + + +def _to_json_text(data: Any) -> str: + return json.dumps(_to_jsonable(data), ensure_ascii=False, default=str) + + +def _ensure_admin(context: ContextWrapper[AstrAgentContext]) -> str | None: + if context.context.event.role != "admin": + return "error: Permission denied. Skill lifecycle tools are only allowed for admin users." + return None + + +async def _get_neo_context( + context: ContextWrapper[AstrAgentContext], +) -> tuple[Any, Any]: + booter = await get_booter( + context.context.context, + context.context.event.unified_msg_origin, + ) + client = getattr(booter, "bay_client", None) + sandbox = getattr(booter, "sandbox", None) + if client is None or sandbox is None: + raise RuntimeError( + "Current sandbox booter does not support Neo skill lifecycle APIs. " + "Please switch to shipyard_neo." + ) + return client, sandbox + + +@dataclass +class GetExecutionHistoryTool(FunctionTool): + name: str = "astrbot_get_execution_history" + description: str = "Get execution history from current sandbox." 
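# --- Illustrative sketch (not part of the patch) ------------------------------
# The two execution-history tools below wrap a pair of sandbox calls used
# elsewhere in this patch: get_execution_history() to page through past runs
# and annotate_execution() to attach a description, tags and notes so a record
# can later back a skill candidate. A hedged usage sketch, assuming `sandbox`
# is the object exposed by ShipyardNeoBooter.sandbox after boot(); the
# execution id is a placeholder.

async def review_recent_browser_runs(sandbox) -> None:
    history = await sandbox.get_execution_history(
        exec_type="browser", success_only=True, limit=20, offset=0
    )
    # The shape of `history` is SDK-defined; the tools normalize it with
    # _to_jsonable() before returning it to the model.
    first_id = "<execution-id-from-history>"  # placeholder
    await sandbox.annotate_execution(
        execution_id=first_id,
        description="Logged into the admin console",
        tags="login,admin",
        notes="Good evidence for a login skill candidate.",
    )
    _ = history
# ------------------------------------------------------------------------------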
+ parameters: dict = field( + default_factory=lambda: { + "type": "object", + "properties": { + "exec_type": {"type": "string"}, + "success_only": {"type": "boolean", "default": False}, + "limit": {"type": "integer", "default": 100}, + "offset": {"type": "integer", "default": 0}, + "tags": {"type": "string"}, + "has_notes": {"type": "boolean", "default": False}, + "has_description": {"type": "boolean", "default": False}, + }, + "required": [], + } + ) + + async def call( + self, + context: ContextWrapper[AstrAgentContext], + exec_type: str | None = None, + success_only: bool = False, + limit: int = 100, + offset: int = 0, + tags: str | None = None, + has_notes: bool = False, + has_description: bool = False, + ) -> ToolExecResult: + if err := _ensure_admin(context): + return err + try: + _client, sandbox = await _get_neo_context(context) + result = await sandbox.get_execution_history( + exec_type=exec_type, + success_only=success_only, + limit=limit, + offset=offset, + tags=tags, + has_notes=has_notes, + has_description=has_description, + ) + return _to_json_text(result) + except Exception as e: + return f"Error getting execution history: {str(e)}" + + +@dataclass +class AnnotateExecutionTool(FunctionTool): + name: str = "astrbot_annotate_execution" + description: str = "Annotate one execution history record." + parameters: dict = field( + default_factory=lambda: { + "type": "object", + "properties": { + "execution_id": {"type": "string"}, + "description": {"type": "string"}, + "tags": {"type": "string"}, + "notes": {"type": "string"}, + }, + "required": ["execution_id"], + } + ) + + async def call( + self, + context: ContextWrapper[AstrAgentContext], + execution_id: str, + description: str | None = None, + tags: str | None = None, + notes: str | None = None, + ) -> ToolExecResult: + if err := _ensure_admin(context): + return err + try: + _client, sandbox = await _get_neo_context(context) + result = await sandbox.annotate_execution( + execution_id=execution_id, + description=description, + tags=tags, + notes=notes, + ) + return _to_json_text(result) + except Exception as e: + return f"Error annotating execution: {str(e)}" + + +@dataclass +class CreateSkillPayloadTool(FunctionTool): + name: str = "astrbot_create_skill_payload" + description: str = "Create a generic skill payload and return payload_ref." + parameters: dict = field( + default_factory=lambda: { + "type": "object", + "properties": { + "payload": { + "anyOf": [{"type": "object"}, {"type": "array"}], + "description": ( + "Skill payload JSON. Recommended fields: skill_markdown, commands, meta." + ), + }, + "kind": { + "type": "string", + "description": "Payload kind.", + "default": "astrbot_skill_v1", + }, + }, + "required": ["payload"], + } + ) + + async def call( + self, + context: ContextWrapper[AstrAgentContext], + payload: dict[str, Any] | list[Any], + kind: str = "astrbot_skill_v1", + ) -> ToolExecResult: + if err := _ensure_admin(context): + return err + try: + client, _sandbox = await _get_neo_context(context) + result = await client.skills.create_payload(payload=payload, kind=kind) + return _to_json_text(result) + except Exception as e: + return f"Error creating skill payload: {str(e)}" + + +@dataclass +class GetSkillPayloadTool(FunctionTool): + name: str = "astrbot_get_skill_payload" + description: str = "Get one skill payload by payload_ref." 
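# --- Illustrative sketch (not part of the patch) ------------------------------
# astrbot_create_skill_payload stores an arbitrary JSON payload and returns a
# payload_ref. The field names below follow the recommendation in the tool
# description (skill_markdown, commands, meta); only skill_markdown is strictly
# needed later, when NeoSkillSyncManager syncs a stable release to a local
# SKILL.md. Command strings and metadata are placeholders.

example_skill_payload = {
    "skill_markdown": (
        "---\n"
        'description: "Log into the example admin console"\n'
        "---\n"
        "# Admin login skill\n"
        "Steps the agent should follow...\n"
    ),
    "commands": ["goto https://example.com/login", "fill #user admin"],  # placeholders
    "meta": {"author": "astrbot", "version": 1},
}
# Stored via:
#   await client.skills.create_payload(payload=example_skill_payload,
#                                      kind="astrbot_skill_v1")
# ------------------------------------------------------------------------------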
+ parameters: dict = field( + default_factory=lambda: { + "type": "object", + "properties": { + "payload_ref": {"type": "string"}, + }, + "required": ["payload_ref"], + } + ) + + async def call( + self, + context: ContextWrapper[AstrAgentContext], + payload_ref: str, + ) -> ToolExecResult: + if err := _ensure_admin(context): + return err + try: + client, _sandbox = await _get_neo_context(context) + result = await client.skills.get_payload(payload_ref) + return _to_json_text(result) + except Exception as e: + return f"Error getting skill payload: {str(e)}" + + +@dataclass +class CreateSkillCandidateTool(FunctionTool): + name: str = "astrbot_create_skill_candidate" + description: str = "Create a skill candidate from source execution IDs." + parameters: dict = field( + default_factory=lambda: { + "type": "object", + "properties": { + "skill_key": {"type": "string"}, + "source_execution_ids": { + "type": "array", + "items": {"type": "string"}, + }, + "scenario_key": {"type": "string"}, + "payload_ref": {"type": "string"}, + }, + "required": ["skill_key", "source_execution_ids"], + } + ) + + async def call( + self, + context: ContextWrapper[AstrAgentContext], + skill_key: str, + source_execution_ids: list[str], + scenario_key: str | None = None, + payload_ref: str | None = None, + ) -> ToolExecResult: + if err := _ensure_admin(context): + return err + try: + client, _sandbox = await _get_neo_context(context) + result = await client.skills.create_candidate( + skill_key=skill_key, + source_execution_ids=source_execution_ids, + scenario_key=scenario_key, + payload_ref=payload_ref, + ) + return _to_json_text(result) + except Exception as e: + return f"Error creating skill candidate: {str(e)}" + + +@dataclass +class ListSkillCandidatesTool(FunctionTool): + name: str = "astrbot_list_skill_candidates" + description: str = "List skill candidates." + parameters: dict = field( + default_factory=lambda: { + "type": "object", + "properties": { + "status": {"type": "string"}, + "skill_key": {"type": "string"}, + "limit": {"type": "integer", "default": 100}, + "offset": {"type": "integer", "default": 0}, + }, + "required": [], + } + ) + + async def call( + self, + context: ContextWrapper[AstrAgentContext], + status: str | None = None, + skill_key: str | None = None, + limit: int = 100, + offset: int = 0, + ) -> ToolExecResult: + if err := _ensure_admin(context): + return err + try: + client, _sandbox = await _get_neo_context(context) + result = await client.skills.list_candidates( + status=status, + skill_key=skill_key, + limit=limit, + offset=offset, + ) + return _to_json_text(result) + except Exception as e: + return f"Error listing skill candidates: {str(e)}" + + +@dataclass +class EvaluateSkillCandidateTool(FunctionTool): + name: str = "astrbot_evaluate_skill_candidate" + description: str = "Evaluate a skill candidate." 
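# --- Illustrative sketch (not part of the patch) ------------------------------
# Taken together, the candidate tools in this file map onto a simple lifecycle
# on the Bay side: create a candidate from annotated executions, optionally
# attach a payload_ref, evaluate it, then promote it (next tool below). A
# hedged end-to-end sketch, assuming `client` is the BayClient held by
# ShipyardNeoBooter.bay_client; all IDs are placeholders.

async def candidate_lifecycle(client) -> None:
    candidate = await client.skills.create_candidate(
        skill_key="admin-login",
        source_execution_ids=["<exec-id-1>", "<exec-id-2>"],
        scenario_key="admin-console",
        payload_ref="<payload-ref>",
    )
    pending = await client.skills.list_candidates(
        skill_key="admin-login", limit=10, offset=0
    )
    await client.skills.evaluate_candidate(
        "<candidate-id>",
        passed=True,
        score=0.92,
        benchmark_id="<benchmark-id>",
        report="3/3 replay runs succeeded.",
    )
    _ = (candidate, pending)
# ------------------------------------------------------------------------------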
+ parameters: dict = field( + default_factory=lambda: { + "type": "object", + "properties": { + "candidate_id": {"type": "string"}, + "passed": {"type": "boolean"}, + "score": {"type": "number"}, + "benchmark_id": {"type": "string"}, + "report": {"type": "string"}, + }, + "required": ["candidate_id", "passed"], + } + ) + + async def call( + self, + context: ContextWrapper[AstrAgentContext], + candidate_id: str, + passed: bool, + score: float | None = None, + benchmark_id: str | None = None, + report: str | None = None, + ) -> ToolExecResult: + if err := _ensure_admin(context): + return err + try: + client, _sandbox = await _get_neo_context(context) + result = await client.skills.evaluate_candidate( + candidate_id, + passed=passed, + score=score, + benchmark_id=benchmark_id, + report=report, + ) + return _to_json_text(result) + except Exception as e: + return f"Error evaluating skill candidate: {str(e)}" + + +@dataclass +class PromoteSkillCandidateTool(FunctionTool): + name: str = "astrbot_promote_skill_candidate" + description: str = "Promote one candidate to release stage (canary/stable)." + parameters: dict = field( + default_factory=lambda: { + "type": "object", + "properties": { + "candidate_id": {"type": "string"}, + "stage": { + "type": "string", + "description": "Release stage: canary/stable", + "default": "canary", + }, + "sync_to_local": { + "type": "boolean", + "description": "When stage is stable, sync payload.skill_markdown to local SKILL.md.", + "default": True, + }, + }, + "required": ["candidate_id"], + } + ) + + async def call( + self, + context: ContextWrapper[AstrAgentContext], + candidate_id: str, + stage: str = "canary", + sync_to_local: bool = True, + ) -> ToolExecResult: + if err := _ensure_admin(context): + return err + if stage not in {"canary", "stable"}: + return "Error promoting skill candidate: stage must be canary or stable." + + try: + client, _sandbox = await _get_neo_context(context) + release = await client.skills.promote_candidate(candidate_id, stage=stage) + release_json = _to_jsonable(release) + + sync_json: dict[str, Any] | None = None + rollback_json: dict[str, Any] | None = None + if stage == "stable" and sync_to_local: + sync_mgr = NeoSkillSyncManager() + try: + sync_result = await sync_mgr.sync_release( + client, + release_id=str(release_json.get("id", "")), + require_stable=True, + ) + sync_json = { + "skill_key": sync_result.skill_key, + "local_skill_name": sync_result.local_skill_name, + "release_id": sync_result.release_id, + "candidate_id": sync_result.candidate_id, + "payload_ref": sync_result.payload_ref, + "map_path": sync_result.map_path, + "synced_at": sync_result.synced_at, + } + except Exception as sync_err: + # Keep state consistent by rolling back the new release. + try: + rollback = await client.skills.rollback_release( + str(release_json.get("id", "")) + ) + rollback_json = _to_jsonable(rollback) + except Exception as rollback_err: + return ( + "Error promoting skill candidate: stable release synced failed; " + f"auto rollback also failed. sync_error={sync_err}; " + f"rollback_error={rollback_err}" + ) + return ( + "Error promoting skill candidate: stable release synced failed; " + f"auto rollback succeeded. 
sync_error={sync_err}; " + f"rollback={_to_json_text(rollback_json)}" + ) + + return _to_json_text( + { + "release": release_json, + "sync": sync_json, + "rollback": rollback_json, + } + ) + except Exception as e: + return f"Error promoting skill candidate: {str(e)}" + + +@dataclass +class ListSkillReleasesTool(FunctionTool): + name: str = "astrbot_list_skill_releases" + description: str = "List skill releases." + parameters: dict = field( + default_factory=lambda: { + "type": "object", + "properties": { + "skill_key": {"type": "string"}, + "active_only": {"type": "boolean", "default": False}, + "stage": {"type": "string"}, + "limit": {"type": "integer", "default": 100}, + "offset": {"type": "integer", "default": 0}, + }, + "required": [], + } + ) + + async def call( + self, + context: ContextWrapper[AstrAgentContext], + skill_key: str | None = None, + active_only: bool = False, + stage: str | None = None, + limit: int = 100, + offset: int = 0, + ) -> ToolExecResult: + if err := _ensure_admin(context): + return err + try: + client, _sandbox = await _get_neo_context(context) + result = await client.skills.list_releases( + skill_key=skill_key, + active_only=active_only, + stage=stage, + limit=limit, + offset=offset, + ) + return _to_json_text(result) + except Exception as e: + return f"Error listing skill releases: {str(e)}" + + +@dataclass +class RollbackSkillReleaseTool(FunctionTool): + name: str = "astrbot_rollback_skill_release" + description: str = "Rollback one skill release." + parameters: dict = field( + default_factory=lambda: { + "type": "object", + "properties": { + "release_id": {"type": "string"}, + }, + "required": ["release_id"], + } + ) + + async def call( + self, + context: ContextWrapper[AstrAgentContext], + release_id: str, + ) -> ToolExecResult: + if err := _ensure_admin(context): + return err + try: + client, _sandbox = await _get_neo_context(context) + result = await client.skills.rollback_release(release_id) + return _to_json_text(result) + except Exception as e: + return f"Error rolling back skill release: {str(e)}" + + +@dataclass +class SyncSkillReleaseTool(FunctionTool): + name: str = "astrbot_sync_skill_release" + description: str = ( + "Sync stable Neo release payload to local SKILL.md and update mapping metadata." 
+ ) + parameters: dict = field( + default_factory=lambda: { + "type": "object", + "properties": { + "release_id": {"type": "string"}, + "skill_key": {"type": "string"}, + "require_stable": {"type": "boolean", "default": True}, + }, + "required": [], + } + ) + + async def call( + self, + context: ContextWrapper[AstrAgentContext], + release_id: str | None = None, + skill_key: str | None = None, + require_stable: bool = True, + ) -> ToolExecResult: + if err := _ensure_admin(context): + return err + try: + client, _sandbox = await _get_neo_context(context) + sync_mgr = NeoSkillSyncManager() + result = await sync_mgr.sync_release( + client, + release_id=release_id, + skill_key=skill_key, + require_stable=require_stable, + ) + return _to_json_text( + { + "skill_key": result.skill_key, + "local_skill_name": result.local_skill_name, + "release_id": result.release_id, + "candidate_id": result.candidate_id, + "payload_ref": result.payload_ref, + "map_path": result.map_path, + "synced_at": result.synced_at, + } + ) + except Exception as e: + return f"Error syncing skill release: {str(e)}" diff --git a/astrbot/core/config/default.py b/astrbot/core/config/default.py index 546768812..dd7e03b97 100644 --- a/astrbot/core/config/default.py +++ b/astrbot/core/config/default.py @@ -120,11 +120,15 @@ }, "computer_use_runtime": "local", "sandbox": { - "booter": "shipyard", + "booter": "shipyard_neo", "shipyard_endpoint": "", "shipyard_access_token": "", "shipyard_ttl": 3600, "shipyard_max_sessions": 10, + "shipyard_neo_endpoint": "", + "shipyard_neo_access_token": "", + "shipyard_neo_profile": "python-default", + "shipyard_neo_ttl": 3600, }, }, # SubAgent orchestrator mode: @@ -2674,12 +2678,48 @@ class ChatProviderTemplate(TypedDict): "provider_settings.sandbox.booter": { "description": "沙箱环境驱动器", "type": "string", - "options": ["shipyard"], - "labels": ["Shipyard"], + "options": ["shipyard_neo", "shipyard"], + "labels": ["Shipyard Neo", "Shipyard"], "condition": { "provider_settings.computer_use_runtime": "sandbox", }, }, + "provider_settings.sandbox.shipyard_neo_endpoint": { + "description": "Shipyard Neo API Endpoint", + "type": "string", + "hint": "Shipyard Neo(Bay) 服务的 API 访问地址。", + "condition": { + "provider_settings.computer_use_runtime": "sandbox", + "provider_settings.sandbox.booter": "shipyard_neo", + }, + }, + "provider_settings.sandbox.shipyard_neo_access_token": { + "description": "Shipyard Neo Access Token", + "type": "string", + "hint": "用于访问 Shipyard Neo(Bay) 的访问令牌。", + "condition": { + "provider_settings.computer_use_runtime": "sandbox", + "provider_settings.sandbox.booter": "shipyard_neo", + }, + }, + "provider_settings.sandbox.shipyard_neo_profile": { + "description": "Shipyard Neo Profile", + "type": "string", + "hint": "Shipyard Neo 沙箱 profile,如 python-default。", + "condition": { + "provider_settings.computer_use_runtime": "sandbox", + "provider_settings.sandbox.booter": "shipyard_neo", + }, + }, + "provider_settings.sandbox.shipyard_neo_ttl": { + "description": "Shipyard Neo Sandbox TTL", + "type": "int", + "hint": "Shipyard Neo 沙箱生存时间(秒)。", + "condition": { + "provider_settings.computer_use_runtime": "sandbox", + "provider_settings.sandbox.booter": "shipyard_neo", + }, + }, "provider_settings.sandbox.shipyard_endpoint": { "description": "Shipyard API Endpoint", "type": "string", diff --git a/astrbot/core/skills/neo_skill_sync.py b/astrbot/core/skills/neo_skill_sync.py new file mode 100644 index 000000000..8e6c65aa4 --- /dev/null +++ b/astrbot/core/skills/neo_skill_sync.py @@ -0,0 +1,226 
@@ +from __future__ import annotations + +import hashlib +import json +import os +import re +from dataclasses import dataclass +from datetime import datetime, timezone +from pathlib import Path +from typing import Any + +from astrbot.core.computer.computer_client import sync_skills_to_active_sandboxes +from astrbot.core.skills.skill_manager import SkillManager +from astrbot.core.utils.astrbot_path import get_astrbot_skills_path + +_MAP_VERSION = 1 +_MAP_FILE_NAME = "neo_skill_map.json" +_SKILL_NAME_RE = re.compile(r"[^a-zA-Z0-9._-]+") + + +def _now_iso() -> str: + return datetime.now(timezone.utc).isoformat() + + +def _to_jsonable(model_like: Any) -> dict[str, Any]: + if isinstance(model_like, dict): + return model_like + if hasattr(model_like, "model_dump"): + dumped = model_like.model_dump() + if isinstance(dumped, dict): + return dumped + return {} + + +@dataclass +class NeoSkillSyncResult: + skill_key: str + local_skill_name: str + release_id: str + candidate_id: str + payload_ref: str + map_path: str + synced_at: str + + +class NeoSkillSyncManager: + def __init__( + self, + *, + skills_root: str | None = None, + map_path: str | None = None, + ) -> None: + self.skills_root = skills_root or get_astrbot_skills_path() + self.map_path = map_path or str(Path(self.skills_root) / _MAP_FILE_NAME) + os.makedirs(self.skills_root, exist_ok=True) + + def _load_map(self) -> dict[str, Any]: + if not os.path.exists(self.map_path): + return {"version": _MAP_VERSION, "items": {}} + try: + with open(self.map_path, encoding="utf-8") as f: + data = json.load(f) + if not isinstance(data, dict): + return {"version": _MAP_VERSION, "items": {}} + items = data.get("items", {}) + if not isinstance(items, dict): + items = {} + return {"version": int(data.get("version", _MAP_VERSION)), "items": items} + except Exception: + return {"version": _MAP_VERSION, "items": {}} + + def _save_map(self, data: dict[str, Any]) -> None: + os.makedirs(os.path.dirname(self.map_path), exist_ok=True) + with open(self.map_path, "w", encoding="utf-8") as f: + json.dump(data, f, ensure_ascii=False, indent=2) + + @staticmethod + def normalize_skill_name(skill_key: str) -> str: + normalized = _SKILL_NAME_RE.sub("-", skill_key.strip().lower()) + normalized = normalized.strip("._-") + if not normalized: + normalized = "skill" + return f"neo_{normalized}" + + def _resolve_local_skill_name(self, skill_key: str, mapping: dict[str, Any]) -> str: + items = mapping.get("items", {}) + if not isinstance(items, dict): + items = {} + existing = items.get(skill_key) + if isinstance(existing, dict): + local_name = existing.get("local_skill_name") + if isinstance(local_name, str) and local_name: + return local_name + + base = self.normalize_skill_name(skill_key) + used_names = { + str(v.get("local_skill_name")) + for v in items.values() + if isinstance(v, dict) and v.get("local_skill_name") + } + if base not in used_names: + return base + suffix = hashlib.sha1(skill_key.encode("utf-8")).hexdigest()[:8] + return f"{base}-{suffix}" + + async def _find_release(self, client: Any, *, release_id: str) -> dict[str, Any]: + offset = 0 + while True: + page = await client.skills.list_releases(limit=100, offset=offset) + page_json = _to_jsonable(page) + items = page_json.get("items", []) + if not isinstance(items, list): + items = [] + for item in items: + if isinstance(item, dict) and item.get("id") == release_id: + return item + total = int(page_json.get("total", 0) or 0) + offset += len(items) + if offset >= total or not items: + break + raise 
ValueError(f"Release not found: {release_id}") + + async def _find_active_stable_release( + self, + client: Any, + *, + skill_key: str, + ) -> dict[str, Any]: + page = await client.skills.list_releases( + skill_key=skill_key, + active_only=True, + stage="stable", + limit=1, + offset=0, + ) + page_json = _to_jsonable(page) + items = page_json.get("items", []) + if not isinstance(items, list) or not items: + raise ValueError( + f"No active stable release found for skill_key: {skill_key}" + ) + if not isinstance(items[0], dict): + raise ValueError("Unexpected release payload format.") + return items[0] + + async def sync_release( + self, + client: Any, + *, + release_id: str | None = None, + skill_key: str | None = None, + require_stable: bool = True, + ) -> NeoSkillSyncResult: + if release_id: + release = await self._find_release(client, release_id=release_id) + elif skill_key: + release = await self._find_active_stable_release( + client, skill_key=skill_key + ) + else: + raise ValueError("release_id or skill_key is required for sync.") + + release_id_val = str(release.get("id") or "") + release_stage = str(release.get("stage") or "") + skill_key_val = str(release.get("skill_key") or "") + candidate_id = str(release.get("candidate_id") or "") + + if not release_id_val or not skill_key_val or not candidate_id: + raise ValueError("Release payload is incomplete.") + if require_stable and release_stage != "stable": + raise ValueError( + f"Only stable releases can be synced to local SKILL.md (got: {release_stage})." + ) + + candidate = await client.skills.get_candidate(candidate_id) + candidate_json = _to_jsonable(candidate) + payload_ref = candidate_json.get("payload_ref") + if not isinstance(payload_ref, str) or not payload_ref: + raise ValueError("Candidate payload_ref is missing.") + + payload_resp = await client.skills.get_payload(payload_ref) + payload_json = _to_jsonable(payload_resp) + payload = payload_json.get("payload") + if not isinstance(payload, dict): + raise ValueError("Skill payload must be a JSON object.") + + skill_markdown = payload.get("skill_markdown") + if not isinstance(skill_markdown, str) or not skill_markdown.strip(): + raise ValueError( + "payload.skill_markdown is required for stable sync to local skill." + ) + + mapping = self._load_map() + local_skill_name = self._resolve_local_skill_name(skill_key_val, mapping) + skill_dir = Path(self.skills_root) / local_skill_name + skill_dir.mkdir(parents=True, exist_ok=True) + + skill_md_path = skill_dir / "SKILL.md" + skill_md_path.write_text(skill_markdown, encoding="utf-8") + + items = mapping.setdefault("items", {}) + items[skill_key_val] = { + "local_skill_name": local_skill_name, + "latest_release_id": release_id_val, + "latest_candidate_id": candidate_id, + "latest_payload_ref": payload_ref, + "updated_at": _now_iso(), + } + mapping["version"] = _MAP_VERSION + self._save_map(mapping) + + # Ensure local skill is visible to AstrBot skill manager. + SkillManager().set_skill_active(local_skill_name, True) + + # Best-effort synchronization to active sandboxes. 
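# --- Illustrative sketch (not part of the patch) ------------------------------
# sync_release() records what it synced in <skills_root>/neo_skill_map.json
# (see _MAP_FILE_NAME). Based on the writes in this method, the file looks
# roughly like this; IDs and the timestamp are placeholders.

example_neo_skill_map = {
    "version": 1,
    "items": {
        "admin-login": {
            "local_skill_name": "neo_admin-login",  # normalize_skill_name() output
            "latest_release_id": "<release-id>",
            "latest_candidate_id": "<candidate-id>",
            "latest_payload_ref": "<payload-ref>",
            "updated_at": "2024-01-01T00:00:00+00:00",
        }
    },
}
# On a name collision, _resolve_local_skill_name() appends an 8-character sha1
# suffix of the skill_key, e.g. "neo_admin-login-1a2b3c4d".
# ------------------------------------------------------------------------------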
+ await sync_skills_to_active_sandboxes() + + return NeoSkillSyncResult( + skill_key=skill_key_val, + local_skill_name=local_skill_name, + release_id=release_id_val, + candidate_id=candidate_id, + payload_ref=payload_ref, + map_path=self.map_path, + synced_at=_now_iso(), + ) diff --git a/astrbot/core/skills/skill_manager.py b/astrbot/core/skills/skill_manager.py index 1e6f01a6d..fab38d1d1 100644 --- a/astrbot/core/skills/skill_manager.py +++ b/astrbot/core/skills/skill_manager.py @@ -7,6 +7,7 @@ import tempfile import zipfile from dataclasses import dataclass +from datetime import datetime, timezone from pathlib import Path, PurePosixPath from astrbot.core.utils.astrbot_path import ( @@ -16,9 +17,11 @@ ) SKILLS_CONFIG_FILENAME = "skills.json" +SANDBOX_SKILLS_CACHE_FILENAME = "sandbox_skills_cache.json" DEFAULT_SKILLS_CONFIG: dict[str, dict] = {"skills": {}} # SANDBOX_SKILLS_ROOT = "/home/shared/skills" SANDBOX_SKILLS_ROOT = "skills" +_SANDBOX_SKILLS_CACHE_VERSION = 1 _SKILL_NAME_RE = re.compile(r"^[A-Za-z0-9._-]+$") @@ -91,7 +94,9 @@ def build_skills_prompt(skills: list[SkillInfo]) -> str: class SkillManager: def __init__(self, skills_root: str | None = None) -> None: self.skills_root = skills_root or get_astrbot_skills_path() - self.config_path = os.path.join(get_astrbot_data_path(), SKILLS_CONFIG_FILENAME) + data_path = Path(get_astrbot_data_path()) + self.config_path = str(data_path / SKILLS_CONFIG_FILENAME) + self.sandbox_skills_cache_path = str(data_path / SANDBOX_SKILLS_CACHE_FILENAME) os.makedirs(self.skills_root, exist_ok=True) os.makedirs(get_astrbot_temp_path(), exist_ok=True) @@ -109,6 +114,54 @@ def _save_config(self, config: dict) -> None: with open(self.config_path, "w", encoding="utf-8") as f: json.dump(config, f, ensure_ascii=False, indent=4) + def _load_sandbox_skills_cache(self) -> dict: + if not os.path.exists(self.sandbox_skills_cache_path): + return {"version": _SANDBOX_SKILLS_CACHE_VERSION, "skills": []} + try: + with open(self.sandbox_skills_cache_path, encoding="utf-8") as f: + data = json.load(f) + if not isinstance(data, dict): + return {"version": _SANDBOX_SKILLS_CACHE_VERSION, "skills": []} + skills = data.get("skills", []) + if not isinstance(skills, list): + skills = [] + return { + "version": int(data.get("version", _SANDBOX_SKILLS_CACHE_VERSION)), + "skills": skills, + } + except Exception: + return {"version": _SANDBOX_SKILLS_CACHE_VERSION, "skills": []} + + def _save_sandbox_skills_cache(self, cache: dict) -> None: + cache["version"] = _SANDBOX_SKILLS_CACHE_VERSION + cache["updated_at"] = datetime.now(timezone.utc).isoformat() + with open(self.sandbox_skills_cache_path, "w", encoding="utf-8") as f: + json.dump(cache, f, ensure_ascii=False, indent=2) + + def set_sandbox_skills_cache(self, skills: list[dict]) -> None: + """Persist sandbox skill metadata discovered from runtime side.""" + deduped: dict[str, dict[str, str]] = {} + for item in skills: + if not isinstance(item, dict): + continue + name = str(item.get("name", "")).strip() + if not name or not _SKILL_NAME_RE.match(name): + continue + description = str(item.get("description", "") or "") + path = str(item.get("path", "") or "") + if not path: + path = f"{SANDBOX_SKILLS_ROOT}/{name}/SKILL.md" + deduped[name] = { + "name": name, + "description": description, + "path": path.replace("\\", "/"), + } + cache = { + "version": _SANDBOX_SKILLS_CACHE_VERSION, + "skills": [deduped[name] for name in sorted(deduped)], + } + self._save_sandbox_skills_cache(cache) + def list_skills( self, *, @@ -125,7 +178,7 @@ 
def list_skills( config = self._load_config() skill_configs = config.get("skills", {}) modified = False - skills: list[SkillInfo] = [] + skills_by_name: dict[str, SkillInfo] = {} for entry in sorted(Path(self.skills_root).iterdir()): if not entry.is_dir(): @@ -151,20 +204,50 @@ def list_skills( else: path_str = str(skill_md) path_str = path_str.replace("\\", "/") - skills.append( - SkillInfo( + skills_by_name[skill_name] = SkillInfo( + name=skill_name, + description=description, + path=path_str, + active=active, + ) + + if runtime == "sandbox": + cache = self._load_sandbox_skills_cache() + for item in cache.get("skills", []): + if not isinstance(item, dict): + continue + skill_name = str(item.get("name", "")).strip() + if ( + not skill_name + or skill_name in skills_by_name + or not _SKILL_NAME_RE.match(skill_name) + ): + continue + active = skill_configs.get(skill_name, {}).get("active", True) + if skill_name not in skill_configs: + skill_configs[skill_name] = {"active": active} + modified = True + if active_only and not active: + continue + description = str(item.get("description", "") or "") + if show_sandbox_path: + path_str = f"{SANDBOX_SKILLS_ROOT}/{skill_name}/SKILL.md" + else: + path_str = str(item.get("path", "") or "") + if not path_str: + path_str = f"{SANDBOX_SKILLS_ROOT}/{skill_name}/SKILL.md" + skills_by_name[skill_name] = SkillInfo( name=skill_name, description=description, - path=path_str, + path=path_str.replace("\\", "/"), active=active, ) - ) if modified: config["skills"] = skill_configs self._save_config(config) - return skills + return [skills_by_name[name] for name in sorted(skills_by_name)] def set_skill_active(self, name: str, active: bool) -> None: config = self._load_config() diff --git a/astrbot/dashboard/routes/skills.py b/astrbot/dashboard/routes/skills.py index 5604d3d82..327cc6f41 100644 --- a/astrbot/dashboard/routes/skills.py +++ b/astrbot/dashboard/routes/skills.py @@ -1,15 +1,38 @@ import os import traceback +from typing import Any from quart import request from astrbot.core import DEMO_MODE, logger +from astrbot.core.computer.computer_client import sync_skills_to_active_sandboxes +from astrbot.core.skills.neo_skill_sync import NeoSkillSyncManager from astrbot.core.skills.skill_manager import SkillManager from astrbot.core.utils.astrbot_path import get_astrbot_temp_path from .route import Response, Route, RouteContext +def _to_jsonable(value: Any) -> Any: + if isinstance(value, dict): + return {k: _to_jsonable(v) for k, v in value.items()} + if isinstance(value, list): + return [_to_jsonable(v) for v in value] + if hasattr(value, "model_dump"): + return _to_jsonable(value.model_dump()) + return value + + +def _to_bool(value: Any, default: bool = False) -> bool: + if value is None: + return default + if isinstance(value, bool): + return value + if isinstance(value, str): + return value.strip().lower() in {"1", "true", "yes", "y", "on"} + return bool(value) + + class SkillsRoute(Route): def __init__(self, context: RouteContext, core_lifecycle) -> None: super().__init__(context) @@ -19,9 +42,32 @@ def __init__(self, context: RouteContext, core_lifecycle) -> None: "/skills/upload": ("POST", self.upload_skill), "/skills/update": ("POST", self.update_skill), "/skills/delete": ("POST", self.delete_skill), + "/skills/neo/candidates": ("GET", self.get_neo_candidates), + "/skills/neo/releases": ("GET", self.get_neo_releases), + "/skills/neo/payload": ("GET", self.get_neo_payload), + "/skills/neo/evaluate": ("POST", self.evaluate_neo_candidate), + 
"/skills/neo/promote": ("POST", self.promote_neo_candidate), + "/skills/neo/rollback": ("POST", self.rollback_neo_release), + "/skills/neo/sync": ("POST", self.sync_neo_release), } self.register_routes() + def _get_neo_client_config(self) -> tuple[str, str]: + provider_settings = self.core_lifecycle.astrbot_config.get( + "provider_settings", + {}, + ) + sandbox = provider_settings.get("sandbox", {}) + endpoint = sandbox.get("shipyard_neo_endpoint", "") + access_token = sandbox.get("shipyard_neo_access_token", "") + if not endpoint or not access_token: + raise ValueError( + "Shipyard Neo configuration is incomplete. " + "Please set provider_settings.sandbox.shipyard_neo_endpoint " + "and shipyard_neo_access_token." + ) + return endpoint, access_token + async def get_skills(self): try: provider_settings = self.core_lifecycle.astrbot_config.get( @@ -70,6 +116,11 @@ async def upload_skill(self): skill_mgr = SkillManager() skill_name = skill_mgr.install_skill_from_zip(temp_path, overwrite=True) + try: + await sync_skills_to_active_sandboxes() + except Exception: + logger.warning("Failed to sync uploaded skills to active sandboxes.") + return ( Response() .ok({"name": skill_name}, "Skill uploaded successfully.") @@ -117,7 +168,264 @@ async def delete_skill(self): if not name: return Response().error("Missing skill name").__dict__ SkillManager().delete_skill(name) + try: + await sync_skills_to_active_sandboxes() + except Exception: + logger.warning("Failed to sync deleted skills to active sandboxes.") return Response().ok({"name": name}).__dict__ except Exception as e: logger.error(traceback.format_exc()) return Response().error(str(e)).__dict__ + + async def get_neo_candidates(self): + try: + endpoint, access_token = self._get_neo_client_config() + status = request.args.get("status") + skill_key = request.args.get("skill_key") + limit = int(request.args.get("limit", 100)) + offset = int(request.args.get("offset", 0)) + + from shipyard_neo import BayClient + + async with BayClient( + endpoint_url=endpoint, + access_token=access_token, + ) as client: + candidates = await client.skills.list_candidates( + status=status, + skill_key=skill_key, + limit=limit, + offset=offset, + ) + return Response().ok(_to_jsonable(candidates)).__dict__ + except Exception as e: + logger.error(traceback.format_exc()) + return Response().error(str(e)).__dict__ + + async def get_neo_releases(self): + try: + endpoint, access_token = self._get_neo_client_config() + skill_key = request.args.get("skill_key") + stage = request.args.get("stage") + active_only = _to_bool(request.args.get("active_only"), False) + limit = int(request.args.get("limit", 100)) + offset = int(request.args.get("offset", 0)) + + from shipyard_neo import BayClient + + async with BayClient( + endpoint_url=endpoint, + access_token=access_token, + ) as client: + releases = await client.skills.list_releases( + skill_key=skill_key, + active_only=active_only, + stage=stage, + limit=limit, + offset=offset, + ) + return Response().ok(_to_jsonable(releases)).__dict__ + except Exception as e: + logger.error(traceback.format_exc()) + return Response().error(str(e)).__dict__ + + async def get_neo_payload(self): + try: + endpoint, access_token = self._get_neo_client_config() + payload_ref = request.args.get("payload_ref", "") + if not payload_ref: + return Response().error("Missing payload_ref").__dict__ + + from shipyard_neo import BayClient + + async with BayClient( + endpoint_url=endpoint, + access_token=access_token, + ) as client: + payload = await 
client.skills.get_payload(payload_ref)
+                return Response().ok(_to_jsonable(payload)).__dict__
+        except Exception as e:
+            logger.error(traceback.format_exc())
+            return Response().error(str(e)).__dict__
+
+    async def evaluate_neo_candidate(self):
+        if DEMO_MODE:
+            return (
+                Response()
+                .error("You are not permitted to do this operation in demo mode")
+                .__dict__
+            )
+        try:
+            endpoint, access_token = self._get_neo_client_config()
+            data = await request.get_json()
+            candidate_id = data.get("candidate_id")
+            passed_value = data.get("passed")
+            if not candidate_id or passed_value is None:
+                return Response().error("Missing candidate_id or passed").__dict__
+            passed = _to_bool(passed_value, False)
+
+            from shipyard_neo import BayClient
+
+            async with BayClient(
+                endpoint_url=endpoint,
+                access_token=access_token,
+            ) as client:
+                result = await client.skills.evaluate_candidate(
+                    candidate_id,
+                    passed=passed,
+                    score=data.get("score"),
+                    benchmark_id=data.get("benchmark_id"),
+                    report=data.get("report"),
+                )
+                return Response().ok(_to_jsonable(result)).__dict__
+        except Exception as e:
+            logger.error(traceback.format_exc())
+            return Response().error(str(e)).__dict__
+
+    async def promote_neo_candidate(self):
+        if DEMO_MODE:
+            return (
+                Response()
+                .error("You are not permitted to do this operation in demo mode")
+                .__dict__
+            )
+        try:
+            endpoint, access_token = self._get_neo_client_config()
+            data = await request.get_json()
+            candidate_id = data.get("candidate_id")
+            stage = data.get("stage", "canary")
+            sync_to_local = _to_bool(data.get("sync_to_local"), True)
+            if not candidate_id:
+                return Response().error("Missing candidate_id").__dict__
+            if stage not in {"canary", "stable"}:
+                return Response().error("Invalid stage: must be 'canary' or 'stable'").__dict__
+
+            from shipyard_neo import BayClient
+
+            async with BayClient(
+                endpoint_url=endpoint,
+                access_token=access_token,
+            ) as client:
+                release = await client.skills.promote_candidate(
+                    candidate_id, stage=stage
+                )
+                release_json = _to_jsonable(release)
+
+                sync_json = None
+                if stage == "stable" and sync_to_local:
+                    sync_mgr = NeoSkillSyncManager()
+                    try:
+                        sync_result = await sync_mgr.sync_release(
+                            client,
+                            release_id=str(release_json.get("id", "")),
+                            require_stable=True,
+                        )
+                        sync_json = {
+                            "skill_key": sync_result.skill_key,
+                            "local_skill_name": sync_result.local_skill_name,
+                            "release_id": sync_result.release_id,
+                            "candidate_id": sync_result.candidate_id,
+                            "payload_ref": sync_result.payload_ref,
+                            "map_path": sync_result.map_path,
+                            "synced_at": sync_result.synced_at,
+                        }
+                    except Exception as sync_err:
+                        rollback_result = await client.skills.rollback_release(
+                            str(release_json.get("id", ""))
+                        )
+                        resp = Response().error(
+                            "Stable promotion succeeded but the local sync failed; "
+                            "the release has been rolled back. "
+                            f"sync_error={sync_err}"
+                        )
+                        resp.data = {
+                            "release": release_json,
+                            "rollback": _to_jsonable(rollback_result),
+                        }
+                        return resp.__dict__
+
+                # Try to push the latest local skills to all active sandboxes.
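+                # Example (illustrative request; field values are placeholders):
+                #   POST /skills/neo/promote
+                #   {"candidate_id": "<id>", "stage": "stable", "sync_to_local": true}
+                # promotes the candidate, writes SKILL.md locally via
+                # NeoSkillSyncManager.sync_release(), and then best-effort pushes
+                # the skills to any active sandboxes below.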
+ try: + await sync_skills_to_active_sandboxes() + except Exception: + logger.warning("Failed to sync skills to active sandboxes.") + + return ( + Response().ok({"release": release_json, "sync": sync_json}).__dict__ + ) + except Exception as e: + logger.error(traceback.format_exc()) + return Response().error(str(e)).__dict__ + + async def rollback_neo_release(self): + if DEMO_MODE: + return ( + Response() + .error("You are not permitted to do this operation in demo mode") + .__dict__ + ) + try: + endpoint, access_token = self._get_neo_client_config() + data = await request.get_json() + release_id = data.get("release_id") + if not release_id: + return Response().error("Missing release_id").__dict__ + + from shipyard_neo import BayClient + + async with BayClient( + endpoint_url=endpoint, + access_token=access_token, + ) as client: + result = await client.skills.rollback_release(release_id) + return Response().ok(_to_jsonable(result)).__dict__ + except Exception as e: + logger.error(traceback.format_exc()) + return Response().error(str(e)).__dict__ + + async def sync_neo_release(self): + if DEMO_MODE: + return ( + Response() + .error("You are not permitted to do this operation in demo mode") + .__dict__ + ) + try: + endpoint, access_token = self._get_neo_client_config() + data = await request.get_json() + release_id = data.get("release_id") + skill_key = data.get("skill_key") + require_stable = _to_bool(data.get("require_stable"), True) + if not release_id and not skill_key: + return Response().error("Missing release_id or skill_key").__dict__ + + from shipyard_neo import BayClient + + async with BayClient( + endpoint_url=endpoint, + access_token=access_token, + ) as client: + sync_mgr = NeoSkillSyncManager() + result = await sync_mgr.sync_release( + client, + release_id=release_id, + skill_key=skill_key, + require_stable=require_stable, + ) + return ( + Response() + .ok( + { + "skill_key": result.skill_key, + "local_skill_name": result.local_skill_name, + "release_id": result.release_id, + "candidate_id": result.candidate_id, + "payload_ref": result.payload_ref, + "map_path": result.map_path, + "synced_at": result.synced_at, + } + ) + .__dict__ + ) + except Exception as e: + logger.error(traceback.format_exc()) + return Response().error(str(e)).__dict__ diff --git a/dashboard/src/components/extension/SkillsSection.vue b/dashboard/src/components/extension/SkillsSection.vue index becd09ea3..686890247 100644 --- a/dashboard/src/components/extension/SkillsSection.vue +++ b/dashboard/src/components/extension/SkillsSection.vue @@ -1,60 +1,196 @@