From 35d03ecd2d03ad909f6d34d003d570db83462e7f Mon Sep 17 00:00:00 2001 From: TrueNine Date: Fri, 24 Apr 2026 19:28:28 +0800 Subject: [PATCH 01/45] fix: build release binaries before packaging smoke tests in CI --- .github/workflows/ci.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8e281253..6b30e439 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -89,6 +89,9 @@ jobs: with: cache-key: ci-packaging-smoke + - name: Build release binaries (for packaging smoke) + run: cargo build --release -p tnmsc -p tnmsm + - name: CLI packaging smoke run: cargo test -p tnmsc-integrate-tests packaging_smoke_covers_release_binary_and_global_install -- --exact --nocapture From 211066da6f5dc3c9c92f490be72cca82a4fb8c92 Mon Sep 17 00:00:00 2001 From: TrueNine Date: Sat, 25 Apr 2026 09:48:44 +0800 Subject: [PATCH 02/45] refactor: extract shared code to sdk, fix local test framework bugs - Move strip_unc_prefix, build_global_scope, collect_context to sdk/src/services/common.rs - Restructure logger into sdk/src/infra/logger/ module (core, diagnostic, formatter, sink) - Rename command_diagnostics.rs -> command_diagnostics_service.rs, prompts.rs -> prompt_service.rs - Add OutputContext, CleanupSnapshot domain types - Extract git_fs module from git_discovery - Pass &Logger to collect_context and build_output_files - Fix PoisonError cascade in local tests: use unwrap_or_else for Mutex - Add cross-process file lock to prevent test binary interference - Add clean-before-install in 3 tests that lacked it - Fix dry_run encoding: use file.encoding.clone() instead of None - Remove unused mut, functions, fields (compiler warnings) - Add logging test files for observability coverage --- cli/local-tests/src/lib.rs | 69 +- cli/local-tests/tests/claude_smoke.rs | 3 + cli/local-tests/tests/clean_blackbox.rs | 8 + cli/local-tests/tests/logging_clean.rs | 55 + cli/local-tests/tests/logging_dry_run.rs | 52 + .../tests/logging_error_feedback.rs | 63 + .../tests/logging_install_observability.rs | 99 ++ cli/local-tests/tests/logging_levels.rs | 106 ++ cli/src/cli.rs | 20 - cli/src/commands/pipeline.rs | 50 +- cli/src/lib.rs | 12 +- cli/src/logger.rs | 74 -- mcp/src/main.rs | 22 +- sdk/src/context/mod.rs | 4 +- sdk/src/domain/base_output_plans.rs | 6 +- sdk/src/domain/cleanup.rs | 141 ++ sdk/src/domain/config/mod.rs | 10 +- sdk/src/domain/mod.rs | 7 + sdk/src/domain/output_context.rs | 53 + .../output_plans/claude_code_output_plan.rs | 4 +- .../domain/output_plans/codex_output_plan.rs | 13 +- .../domain/output_plans/cursor_output_plan.rs | 4 +- .../domain/output_plans/droid_output_plan.rs | 6 +- .../domain/output_plans/gemini_output_plan.rs | 4 +- .../generic_skills_output_plan.rs | 4 +- ...etbrains_ai_assistant_codex_output_plan.rs | 4 +- .../domain/output_plans/kiro_output_plan.rs | 4 +- .../output_plans/opencode_output_plan.rs | 4 +- .../domain/output_plans/qoder_output_plan.rs | 4 +- .../domain/output_plans/trae_output_plan.rs | 4 +- .../domain/output_plans/warp_output_plan.rs | 4 +- .../output_plans/windsurf_output_plan.rs | 4 +- sdk/src/infra/git_fs.rs | 196 +++ sdk/src/infra/logger.rs | 1175 ----------------- sdk/src/infra/logger/core.rs | 263 ++++ sdk/src/infra/logger/diagnostic.rs | 217 +++ sdk/src/infra/logger/formatter.rs | 228 ++++ sdk/src/infra/logger/mod.rs | 305 +++++ sdk/src/infra/logger/sink.rs | 133 ++ sdk/src/infra/mod.rs | 2 + sdk/src/infra/script_runtime.rs | 8 +- sdk/src/lib.rs | 2 +- sdk/src/policy/cleanup.rs | 208 +-- sdk/src/policy/git_discovery.rs | 193 +-- sdk/src/policy/path_blocking.rs | 7 +- sdk/src/services/clean_service.rs | 745 ++--------- ...tics.rs => command_diagnostics_service.rs} | 12 +- sdk/src/services/common.rs | 521 ++++++++ sdk/src/services/dry_run_service.rs | 559 ++------ sdk/src/services/install_service.rs | 605 ++------- sdk/src/services/mod.rs | 5 +- .../{prompts.rs => prompt_service.rs} | 23 +- 52 files changed, 3022 insertions(+), 3302 deletions(-) create mode 100644 cli/local-tests/tests/logging_clean.rs create mode 100644 cli/local-tests/tests/logging_dry_run.rs create mode 100644 cli/local-tests/tests/logging_error_feedback.rs create mode 100644 cli/local-tests/tests/logging_install_observability.rs create mode 100644 cli/local-tests/tests/logging_levels.rs delete mode 100644 cli/src/logger.rs create mode 100644 sdk/src/domain/cleanup.rs create mode 100644 sdk/src/domain/output_context.rs create mode 100644 sdk/src/infra/git_fs.rs delete mode 100644 sdk/src/infra/logger.rs create mode 100644 sdk/src/infra/logger/core.rs create mode 100644 sdk/src/infra/logger/diagnostic.rs create mode 100644 sdk/src/infra/logger/formatter.rs create mode 100644 sdk/src/infra/logger/mod.rs create mode 100644 sdk/src/infra/logger/sink.rs rename sdk/src/services/{command_diagnostics.rs => command_diagnostics_service.rs} (92%) create mode 100644 sdk/src/services/common.rs rename sdk/src/services/{prompts.rs => prompt_service.rs} (97%) diff --git a/cli/local-tests/src/lib.rs b/cli/local-tests/src/lib.rs index 0bc9b510..f795b84a 100644 --- a/cli/local-tests/src/lib.rs +++ b/cli/local-tests/src/lib.rs @@ -4,6 +4,7 @@ use std::fs; use std::path::{Path, PathBuf}; use std::process::{Command, Output}; use std::sync::{Mutex, OnceLock}; +use std::time::Duration; static BINARY_BUILT: OnceLock<()> = OnceLock::new(); static PROJECT_LOCK: OnceLock> = OnceLock::new(); @@ -38,6 +39,7 @@ pub struct LocalTestRunner { binary: PathBuf, cwd: PathBuf, _lock_guard: std::sync::MutexGuard<'static, ()>, + _file_lock: CrossProcessLock, } impl LocalTestRunner { @@ -45,11 +47,13 @@ impl LocalTestRunner { /// 若该目录不存在,则回退到当前目录。 pub fn new() -> Self { ensure_binary(); - // 所有测试共享同一个真实项目目录,必须串行执行 + // Cross-process lock: serialises test binaries sharing the same project + let file_lock = acquire_cross_process_lock(); + // In-process lock: serialises tests within a single binary let guard = PROJECT_LOCK .get_or_init(|| Mutex::new(())) .lock() - .expect("project lock should not be poisoned"); + .unwrap_or_else(|e| e.into_inner()); let default_project = home_dir().join("workspace").join("memory-sync"); let cwd = if default_project.is_dir() { default_project @@ -60,15 +64,17 @@ impl LocalTestRunner { binary: binary_path(), cwd, _lock_guard: guard, + _file_lock: file_lock, } } pub fn with_cwd(cwd: impl AsRef) -> Self { ensure_binary(); + let file_lock = acquire_cross_process_lock(); let guard = PROJECT_LOCK .get_or_init(|| Mutex::new(())) .lock() - .expect("project lock should not be poisoned"); + .unwrap_or_else(|e| e.into_inner()); let cwd = cwd.as_ref().to_path_buf(); assert!( cwd.is_dir(), @@ -79,6 +85,7 @@ impl LocalTestRunner { binary: binary_path(), cwd, _lock_guard: guard, + _file_lock: file_lock, } } @@ -126,6 +133,21 @@ impl LocalTestRunner { command_output(&mut cmd, &format!("tnmsc {}", args.join(" "))) } + /// 在指定目录下运行 tnmsc 命令,并设置额外环境变量。 + pub fn run_at_with_env( + &self, + cwd: impl AsRef, + args: &[&str], + envs: &[(&str, &str)], + ) -> CommandResult { + let mut cmd = Command::new(&self.binary); + cmd.args(args).current_dir(cwd.as_ref()); + for (k, v) in envs { + cmd.env(k, v); + } + command_output(&mut cmd, &format!("tnmsc {}", args.join(" "))) + } + pub fn run_success(&self, args: &[&str]) -> CommandResult { let result = self.run(args); result.assert_success(&format!("tnmsc {}", args.join(" "))); @@ -341,6 +363,47 @@ impl LocalTestRunner { } } +// --------------------------------------------------------------------------- +// Cross-process file lock — prevents test binaries from interfering with each +// other when running local tests on the shared project directory. +// --------------------------------------------------------------------------- + +pub struct CrossProcessLock(Option); + +impl Drop for CrossProcessLock { + fn drop(&mut self) { + if let Some(path) = self.0.take() { + let _ = std::fs::remove_file(&path); + } + } +} + +fn acquire_cross_process_lock() -> CrossProcessLock { + let lock_path = home_dir().join(".tnmsc_local_test_lock"); + loop { + match std::fs::File::create_new(&lock_path) { + Ok(_) => return CrossProcessLock(Some(lock_path)), + Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => { + // Stale-lock detection: if older than 5 minutes, remove and retry + if let Ok(meta) = std::fs::metadata(&lock_path) { + if let Ok(created) = meta.created() { + if let Ok(elapsed) = created.elapsed() { + if elapsed > Duration::from_secs(300) { + let _ = std::fs::remove_file(&lock_path); + continue; + } + } + } + } + std::thread::sleep(Duration::from_millis(200)); + } + Err(_) => { + std::thread::sleep(Duration::from_millis(200)); + } + } + } +} + pub fn ensure_binary() { let binary = binary_path(); diff --git a/cli/local-tests/tests/claude_smoke.rs b/cli/local-tests/tests/claude_smoke.rs index 30b92555..202b7c55 100644 --- a/cli/local-tests/tests/claude_smoke.rs +++ b/cli/local-tests/tests/claude_smoke.rs @@ -101,6 +101,9 @@ fn local_claude_clean_removes_all_project_files() { let runner = LocalTestRunner::new(); runner.assert_project_ready(); + let clean = runner.clean(); + clean.assert_success("tnmsc clean before install"); + let install = runner.install(); install.assert_success("tnmsc install before clean"); diff --git a/cli/local-tests/tests/clean_blackbox.rs b/cli/local-tests/tests/clean_blackbox.rs index c5897eb8..78c814a5 100644 --- a/cli/local-tests/tests/clean_blackbox.rs +++ b/cli/local-tests/tests/clean_blackbox.rs @@ -22,6 +22,10 @@ fn local_clean_removes_project_claude_md() { let runner = LocalTestRunner::new(); runner.assert_project_ready(); + // 先 clean 再 install 确保可复现 + let clean = runner.clean(); + clean.assert_success("tnmsc clean before install"); + // 先 install 生成文件 let install = runner.install(); install.assert_success("tnmsc install before clean"); @@ -45,6 +49,10 @@ fn local_clean_dry_run_does_not_remove_files() { let runner = LocalTestRunner::new(); runner.assert_project_ready(); + // 先 clean 再 install 确保可复现 + let clean = runner.clean(); + clean.assert_success("tnmsc clean before install"); + // 先 install 生成文件 let install = runner.install(); install.assert_success("tnmsc install before dry-run clean"); diff --git a/cli/local-tests/tests/logging_clean.rs b/cli/local-tests/tests/logging_clean.rs new file mode 100644 index 00000000..001402dc --- /dev/null +++ b/cli/local-tests/tests/logging_clean.rs @@ -0,0 +1,55 @@ +//! Clean 可观测性测试:验证 clean 命令输出足够的可观测信息。 + +use tnmsc_local_tests::LocalTestRunner; + +#[test] +fn clean_outputs_key_spans_and_events() { + let runner = LocalTestRunner::new(); + runner.assert_project_ready(); + + // 先 install 生成文件,再 clean + let install = runner.install(); + install.assert_success("tnmsc install before clean"); + + let result = runner.run(&["--trace", "clean"]); + result.assert_success("tnmsc --trace clean"); + + // 验证顶层事件 + assert!( + result.stdout.contains("### Running clean"), + "clean should output 'Running clean'. stdout:\n{}", + result.stdout + ); + + // 验证主要 Span + assert!( + result.stdout.contains("### cleanup.discover started"), + "clean should output 'cleanup.discover' span. stdout:\n{}", + result.stdout + ); + assert!( + result.stdout.contains("### cleanup.execute started"), + "clean should output 'cleanup.execute' span. stdout:\n{}", + result.stdout + ); +} + +#[test] +fn clean_outputs_deletion_summary() { + let runner = LocalTestRunner::new(); + runner.assert_project_ready(); + + // 先 install 生成文件,再 clean + let install = runner.install(); + install.assert_success("tnmsc install before clean"); + + let result = runner.run(&["--info", "clean"]); + result.assert_success("tnmsc --info clean"); + + // Info 级别应该输出删除摘要 + assert!( + result.stdout.contains("Deleted") || result.stdout.contains("No files needed updates"), + "clean should output deletion summary. stdout:\n{}", + result.stdout + ); +} diff --git a/cli/local-tests/tests/logging_dry_run.rs b/cli/local-tests/tests/logging_dry_run.rs new file mode 100644 index 00000000..6e1c1b17 --- /dev/null +++ b/cli/local-tests/tests/logging_dry_run.rs @@ -0,0 +1,52 @@ +//! Dry-run 可观测性测试:验证 dry-run 命令输出足够的可观测信息。 + +use tnmsc_local_tests::LocalTestRunner; + +#[test] +fn dry_run_outputs_key_spans_and_events() { + let runner = LocalTestRunner::new(); + runner.assert_project_ready(); + + let result = runner.run(&["--trace", "dry-run"]); + result.assert_success("tnmsc --trace dry-run"); + + // 验证顶层事件 + assert!( + result.stdout.contains("### Running dry-run"), + "dry-run should output 'Running dry-run'. stdout:\n{}", + result.stdout + ); + + // 验证主要 Span + assert!( + result.stdout.contains("### config.load started"), + "dry-run should output 'config.load' span. stdout:\n{}", + result.stdout + ); + assert!( + result.stdout.contains("### context.collect started"), + "dry-run should output 'context.collect' span. stdout:\n{}", + result.stdout + ); + assert!( + result.stdout.contains("### output.build started"), + "dry-run should output 'output.build' span. stdout:\n{}", + result.stdout + ); +} + +#[test] +fn dry_run_outputs_plan_preview() { + let runner = LocalTestRunner::new(); + runner.assert_project_ready(); + + let result = runner.run(&["--info", "dry-run"]); + result.assert_success("tnmsc --info dry-run"); + + // Info 级别应该输出计划摘要 + assert!( + result.stdout.contains("Planned") || result.stdout.contains("No files needed updates"), + "dry-run should output plan summary. stdout:\n{}", + result.stdout + ); +} diff --git a/cli/local-tests/tests/logging_error_feedback.rs b/cli/local-tests/tests/logging_error_feedback.rs new file mode 100644 index 00000000..952bd969 --- /dev/null +++ b/cli/local-tests/tests/logging_error_feedback.rs @@ -0,0 +1,63 @@ +//! 错误反馈测试:验证错误时输出结构化诊断信息。 + +use std::fs; +use tnmsc_local_tests::LocalTestRunner; + +fn run_without_global_config( + runner: &LocalTestRunner, + args: &[&str], +) -> tnmsc_local_tests::CommandResult { + let temp_home = std::env::temp_dir().join("tnmsc_test_home"); + let _ = fs::remove_dir_all(&temp_home); + fs::create_dir_all(&temp_home).unwrap(); + // Point TNMSC_CONFIG_PATH to a non-existent file so global config is not found. + let fake_config = temp_home.join(".tnmsc.json"); + runner.run_at_with_env( + std::env::temp_dir(), + args, + &[("TNMSC_CONFIG_PATH", fake_config.to_str().unwrap())], + ) +} + +#[test] +fn missing_config_outputs_diagnostic_with_fix() { + let runner = LocalTestRunner::new(); + // 在临时目录运行(没有 .tnmsc.json),并隔离全局配置 + let result = run_without_global_config(&runner, &["install"]); + result.assert_failure("install without config"); + + // 验证诊断结构存在 + assert!( + result.stderr.contains("What happened") || result.stdout.contains("What happened"), + "error should contain 'What happened' section. stdout:\n{}\nstderr:\n{}", + result.stdout, result.stderr + ); + + // 验证有修复建议(嵌入在错误消息中) + assert!( + result.stderr.contains("Please create it") || result.stdout.contains("Please create it"), + "error should contain fix suggestion. stdout:\n{}\nstderr:\n{}", + result.stdout, result.stderr + ); + + // 验证提及配置文件 + assert!( + result.stderr.contains(".tnmsc.json") || result.stdout.contains(".tnmsc.json"), + "error should mention .tnmsc.json. stdout:\n{}\nstderr:\n{}", + result.stdout, result.stderr + ); +} + +#[test] +fn missing_config_at_error_level_shows_diagnostic() { + let runner = LocalTestRunner::new(); + let result = run_without_global_config(&runner, &["--error", "install"]); + result.assert_failure("install without config at error level"); + + // Error 级别也应该显示诊断 + assert!( + result.stderr.contains("What happened"), + "--error should still show diagnostic. stderr:\n{}", + result.stderr + ); +} diff --git a/cli/local-tests/tests/logging_install_observability.rs b/cli/local-tests/tests/logging_install_observability.rs new file mode 100644 index 00000000..1993c75f --- /dev/null +++ b/cli/local-tests/tests/logging_install_observability.rs @@ -0,0 +1,99 @@ +//! Install 可观测性测试:验证 install 命令输出足够的可观测信息。 + +use tnmsc_local_tests::LocalTestRunner; + +#[test] +fn install_outputs_key_spans_and_events() { + let runner = LocalTestRunner::new(); + runner.assert_project_ready(); + + let clean = runner.clean(); + clean.assert_success("tnmsc clean before install"); + + let result = runner.run(&["--trace", "install"]); + result.assert_success("tnmsc --trace install"); + + // 验证顶层事件 + assert!( + result.stdout.contains("### Install started"), + "install should output 'Install started'. stdout:\n{}", + result.stdout + ); + assert!( + result.stdout.contains("### Install completed"), + "install should output 'Install completed'. stdout:\n{}", + result.stdout + ); + + // 验证主要 Span + assert!( + result.stdout.contains("### config.load started"), + "install should output 'config.load' span. stdout:\n{}", + result.stdout + ); + assert!( + result.stdout.contains("### context.collect started"), + "install should output 'context.collect' span. stdout:\n{}", + result.stdout + ); + assert!( + result.stdout.contains("### output.build started"), + "install should output 'output.build' span. stdout:\n{}", + result.stdout + ); + assert!( + result.stdout.contains("### files.write started"), + "install should output 'files.write' span. stdout:\n{}", + result.stdout + ); + + // 验证 collector span + assert!( + result.stdout.contains("### collect.aindex_resolvers started"), + "install should output 'collect.aindex_resolvers' span. stdout:\n{}", + result.stdout + ); + assert!( + result.stdout.contains("### collect.project_prompt started"), + "install should output 'collect.project_prompt' span. stdout:\n{}", + result.stdout + ); +} + +#[test] +fn install_outputs_plugin_resolution() { + let runner = LocalTestRunner::new(); + runner.assert_project_ready(); + + let clean = runner.clean(); + clean.assert_success("tnmsc clean"); + + let result = runner.run(&["--info", "install"]); + result.assert_success("tnmsc --info install"); + + // 验证插件解析信息 + assert!( + result.stdout.contains("Plugins resolved"), + "install should output plugin resolution. stdout:\n{}", + result.stdout + ); +} + +#[test] +fn install_outputs_file_write_events() { + let runner = LocalTestRunner::new(); + runner.assert_project_ready(); + + let clean = runner.clean(); + clean.assert_success("tnmsc clean"); + + let result = runner.run(&["--debug", "install"]); + result.assert_success("tnmsc --debug install"); + + // 验证文件写入事件(应该有文件被写入) + assert!( + result.stdout.contains("file.written") || result.stdout.contains("file.skipped"), + "install should output file write events. stdout:\n{}", + result.stdout + ); +} diff --git a/cli/local-tests/tests/logging_levels.rs b/cli/local-tests/tests/logging_levels.rs new file mode 100644 index 00000000..b04ba3dc --- /dev/null +++ b/cli/local-tests/tests/logging_levels.rs @@ -0,0 +1,106 @@ +//! 日志级别测试:验证不同日志级别下的输出行为。 + +use tnmsc_local_tests::LocalTestRunner; + +#[test] +fn trace_level_outputs_span_events() { + let runner = LocalTestRunner::new(); + runner.assert_project_ready(); + + // clean 后 install,确保有文件写入操作 + let clean = runner.clean(); + clean.assert_success("tnmsc clean"); + + let result = runner.run(&["--trace", "install"]); + result.assert_success("tnmsc --trace install"); + + // Trace 级别应该输出 collector span + assert!( + result.stdout.contains("### collect.aindex_resolvers started"), + "--trace should output collector spans. stdout:\n{}", + result.stdout + ); + assert!( + result.stdout.contains("### config.load started"), + "--trace should output config span. stdout:\n{}", + result.stdout + ); +} + +#[test] +fn info_level_outputs_top_level_events() { + let runner = LocalTestRunner::new(); + runner.assert_project_ready(); + + let clean = runner.clean(); + clean.assert_success("tnmsc clean"); + + let result = runner.install(); // 默认 info 级别 + result.assert_success("tnmsc install"); + + // Info 级别应该输出顶层事件 + assert!( + result.stdout.contains("### Install started"), + "default level should output 'Install started'. stdout:\n{}", + result.stdout + ); + assert!( + result.stdout.contains("### Install completed"), + "default level should output 'Install completed'. stdout:\n{}", + result.stdout + ); +} + +#[test] +fn error_level_only_outputs_errors() { + let runner = LocalTestRunner::new(); + // 在一个没有 config 的目录运行,并隔离全局配置,触发错误 + let temp_home = std::env::temp_dir().join("tnmsc_test_home"); + let _ = std::fs::remove_dir_all(&temp_home); + std::fs::create_dir_all(&temp_home).unwrap(); + let fake_config = temp_home.join(".tnmsc.json"); + let result = runner.run_at_with_env( + std::env::temp_dir(), + &["--error", "install"], + &[("TNMSC_CONFIG_PATH", fake_config.to_str().unwrap())], + ); + result.assert_failure("tnmsc --error install without config"); + + // Error 级别不应该输出 info 事件 + assert!( + !result.stdout.contains("### Install started"), + "--error should not output info events. stdout:\n{}", + result.stdout + ); + + // 但应该输出错误诊断 + assert!( + result.stderr.contains("What happened") || result.stderr.contains("error"), + "--error should output error diagnostics. stderr:\n{}", + result.stderr + ); +} + +#[test] +fn debug_level_outputs_debug_events() { + let runner = LocalTestRunner::new(); + runner.assert_project_ready(); + + let clean = runner.clean(); + clean.assert_success("tnmsc clean"); + + let result = runner.run(&["--debug", "install"]); + result.assert_success("tnmsc --debug install"); + + // Debug 级别应该输出更多上下文 + assert!( + result.stdout.contains("### Context collected"), + "--debug should output 'Context collected'. stdout:\n{}", + result.stdout + ); + assert!( + result.stdout.contains("### Output files built"), + "--debug should output 'Output files built'. stdout:\n{}", + result.stdout + ); +} diff --git a/cli/src/cli.rs b/cli/src/cli.rs index 02020fdd..0ab42be5 100644 --- a/cli/src/cli.rs +++ b/cli/src/cli.rs @@ -6,8 +6,6 @@ use std::path::PathBuf; use clap::{Args, Parser, Subcommand}; -use crate::logger::LogLevel; - /// Cross-AI-tool prompt synchronisation CLI #[derive(Parser, Debug)] #[command( @@ -115,25 +113,7 @@ impl ResolvedLogLevel { } } - pub fn to_logger_level(self) -> LogLevel { - match self { - Self::Trace => LogLevel::Trace, - Self::Debug => LogLevel::Debug, - Self::Info => LogLevel::Info, - Self::Warn => LogLevel::Warn, - Self::Error => LogLevel::Error, - } - } - pub fn to_sdk_logger_level(self) -> tnmsd::infra::logger::LogLevel { - match self { - Self::Trace => tnmsd::infra::logger::LogLevel::Trace, - Self::Debug => tnmsd::infra::logger::LogLevel::Debug, - Self::Info => tnmsd::infra::logger::LogLevel::Info, - Self::Warn => tnmsd::infra::logger::LogLevel::Warn, - Self::Error => tnmsd::infra::logger::LogLevel::Error, - } - } } /// Resolve log level from CLI flags. diff --git a/cli/src/commands/pipeline.rs b/cli/src/commands/pipeline.rs index 824719c7..af282096 100644 --- a/cli/src/commands/pipeline.rs +++ b/cli/src/commands/pipeline.rs @@ -1,8 +1,8 @@ use std::process::ExitCode; -use serde_json::Value; +use serde_json::{Value, json}; -use crate::logger; +use tnmsd::infra::logger::{Logger, create_logger, flush}; #[derive(Debug, PartialEq, Eq)] struct RenderedCommandResult { @@ -155,30 +155,44 @@ fn render_entry(label: &str, value: &Value) -> Vec { } } -fn log_command_start(command_name: &str) { - logger::info(&format!("Running {command_name}")); +fn log_command_start(logger: &Logger, command_name: &str) { + logger.info(format!("Running {command_name}"), None); if let Ok(current_dir) = std::env::current_dir() { - logger::debug(&format!("currentDir={}", current_dir.display())); + logger.debug( + "currentDir", + Some(json!({ "currentDir": current_dir.display().to_string() })), + ); } } fn log_command_finish( + logger: &Logger, command_name: &str, result: &Result, ) { match result { Ok(command_result) => { - logger::debug(&format!( - "{command_name} result: success={}, filesAffected={}, dirsAffected={}, warnings={}, errors={}", - command_result.success, - command_result.files_affected, - command_result.dirs_affected, - command_result.warnings.len(), - command_result.errors.len(), - )); + logger.debug( + "command result", + Some(json!({ + "command": command_name, + "success": command_result.success, + "filesAffected": command_result.files_affected, + "dirsAffected": command_result.dirs_affected, + "warnings": command_result.warnings.len(), + "errors": command_result.errors.len(), + })), + ); } Err(error) => { - logger::error(&format!("{command_name} failed: {error}")); + logger.error(tnmsd::infra::logger::DiagnosticInput { + code: "COMMAND_FAILED".to_string(), + title: format!("{command_name} failed"), + root_cause: vec![error.to_string()], + exact_fix: None, + possible_fixes: None, + details: None, + }); } } } @@ -190,9 +204,10 @@ fn run_command( ) -> Result, options: tnmsd::MemorySyncCommandOptions, ) -> ExitCode { - log_command_start(command_name); + let logger = create_logger("pipeline", None); + log_command_start(&logger, command_name); let result = operation(options); - log_command_finish(command_name, &result); + log_command_finish(&logger, command_name, &result); let rendered = render_result(result); for line in rendered.stdout_lines { @@ -202,8 +217,7 @@ fn run_command( eprintln!("{line}"); } - logger::flush_output(); - tnmsd::infra::logger::flush_output(); + flush(); if rendered.success { ExitCode::SUCCESS diff --git a/cli/src/lib.rs b/cli/src/lib.rs index 96dbf2d2..7b6e5a0b 100644 --- a/cli/src/lib.rs +++ b/cli/src/lib.rs @@ -1,17 +1,23 @@ mod cli; mod commands; -mod logger; use std::process::ExitCode; use clap::Parser; +use tnmsd::infra::logger::{LogLevel, set_global_level}; pub fn run() -> ExitCode { let args = cli::Cli::parse(); if let Some(level) = cli::resolve_log_level(&args) { - logger::set_global_log_level(level.to_logger_level()); - tnmsd::infra::logger::set_global_log_level(level.to_sdk_logger_level()); + let log_level = match level { + cli::ResolvedLogLevel::Trace => LogLevel::Trace, + cli::ResolvedLogLevel::Debug => LogLevel::Debug, + cli::ResolvedLogLevel::Info => LogLevel::Info, + cli::ResolvedLogLevel::Warn => LogLevel::Warn, + cli::ResolvedLogLevel::Error => LogLevel::Error, + }; + set_global_level(log_level); } match cli::resolve_command(&args) { diff --git a/cli/src/logger.rs b/cli/src/logger.rs deleted file mode 100644 index bc5ef684..00000000 --- a/cli/src/logger.rs +++ /dev/null @@ -1,74 +0,0 @@ -use std::sync::OnceLock; - -static LOGGER: OnceLock = OnceLock::new(); - -#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd)] -pub enum LogLevel { - Trace, - Debug, - Info, - Warn, - Error, -} - -struct Logger { - level: LogLevel, -} - -impl Logger { - fn new(level: LogLevel) -> Self { - Self { level } - } - - fn log(&self, level: LogLevel, message: &str) { - if level >= self.level { - eprintln!("[{}] {}", level_to_string(level), message); - } - } -} - -fn level_to_string(level: LogLevel) -> &'static str { - match level { - LogLevel::Trace => "TRACE", - LogLevel::Debug => "DEBUG", - LogLevel::Info => "INFO", - LogLevel::Warn => "WARN", - LogLevel::Error => "ERROR", - } -} - -pub fn set_global_log_level(level: LogLevel) { - let _ = LOGGER.set(Logger::new(level)); -} - -pub fn flush_output() {} - -pub fn trace(message: &str) { - if let Some(logger) = LOGGER.get() { - logger.log(LogLevel::Trace, message); - } -} - -pub fn debug(message: &str) { - if let Some(logger) = LOGGER.get() { - logger.log(LogLevel::Debug, message); - } -} - -pub fn info(message: &str) { - if let Some(logger) = LOGGER.get() { - logger.log(LogLevel::Info, message); - } -} - -pub fn warn(message: &str) { - if let Some(logger) = LOGGER.get() { - logger.log(LogLevel::Warn, message); - } -} - -pub fn error(message: &str) { - if let Some(logger) = LOGGER.get() { - logger.log(LogLevel::Error, message); - } -} diff --git a/mcp/src/main.rs b/mcp/src/main.rs index 924e0d62..017697d5 100644 --- a/mcp/src/main.rs +++ b/mcp/src/main.rs @@ -151,6 +151,9 @@ fn handle_tools_call(params: &Value) -> Value { }; let arguments = params.get("arguments").cloned().unwrap_or(json!({})); + let logger = tnmsd::infra::logger::create_logger("mcp.tools", None); + let _span = logger.span(&format!("tools.{}", name)).enter(); + match name { "list_prompts" => handle_list_prompts(&arguments), "get_prompt" => handle_get_prompt(&arguments), @@ -319,13 +322,30 @@ fn run_stdio_server() { } fn main() -> ExitCode { + // Initialize logger, default Info, override via LOG_LEVEL env var + tnmsd::infra::logger::set_global_level( + std::env::var("LOG_LEVEL") + .ok() + .and_then(|s| tnmsd::infra::logger::LogLevel::from_str_loose(&s)) + .unwrap_or(tnmsd::infra::logger::LogLevel::Info) + ); + let cli = Cli::parse(); + let logger = tnmsd::infra::logger::create_logger("tnmsm", None); match resolve_command(&cli) { ResolvedCommand::Serve => { + let _span = logger.span("server.serve").enter(); + logger.info("MCP server started", Some(json!({ + "serverName": SERVER_NAME, + "protocolVersion": PROTOCOL_VERSION, + }))); run_stdio_server(); ExitCode::SUCCESS } - ResolvedCommand::AssembleNpm(args) => commands::package::execute(&args), + ResolvedCommand::AssembleNpm(args) => { + let _span = logger.span("command.assemble_npm").enter(); + commands::package::execute(&args) + } } } diff --git a/sdk/src/context/mod.rs b/sdk/src/context/mod.rs index 6b9ede14..f2a1b6ed 100644 --- a/sdk/src/context/mod.rs +++ b/sdk/src/context/mod.rs @@ -1,3 +1 @@ -pub mod output_context; - -pub use output_context::OutputContext; +pub use crate::domain::output_context::OutputContext; diff --git a/sdk/src/domain/base_output_plans.rs b/sdk/src/domain/base_output_plans.rs index 238fc95a..2e708eef 100644 --- a/sdk/src/domain/base_output_plans.rs +++ b/sdk/src/domain/base_output_plans.rs @@ -3,12 +3,12 @@ use std::path::{Component, Path, PathBuf}; use serde::{Deserialize, Serialize}; use crate::CliError; -use crate::context::OutputContext; +use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; +use crate::domain::output_context::OutputContext; use crate::domain::plugin_shared::{ IDEKind, Project, ProjectIDEConfigFile, RelativePath, Workspace, }; -use crate::policy::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; -use crate::policy::git_discovery::{find_all_git_repos, resolve_git_info_dir}; +use crate::infra::git_fs::{find_all_git_repos, resolve_git_info_dir}; const AGENTS_PLUGIN_NAME: &str = "AgentsOutputAdaptor"; const GIT_EXCLUDE_PLUGIN_NAME: &str = "GitExcludeOutputAdaptor"; diff --git a/sdk/src/domain/cleanup.rs b/sdk/src/domain/cleanup.rs new file mode 100644 index 00000000..322828c0 --- /dev/null +++ b/sdk/src/domain/cleanup.rs @@ -0,0 +1,141 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum ProtectionModeDto { + Direct, + Recursive, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum ProtectionRuleMatcherDto { + Path, + Glob, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum CleanupTargetKindDto { + File, + Directory, + Glob, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum CleanupErrorKindDto { + File, + Directory, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CleanupTargetDto { + pub path: String, + pub kind: CleanupTargetKindDto, + #[serde(default)] + pub exclude_basenames: Vec, + pub protection_mode: Option, + pub scope: Option, + pub label: Option, +} + +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CleanupDeclarationsDto { + #[serde(default)] + pub delete: Vec, + #[serde(default)] + pub protect: Vec, + #[serde(default)] + pub exclude_scan_globs: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PluginCleanupSnapshotDto { + pub plugin_name: String, + #[serde(default)] + pub outputs: Vec, + #[serde(default)] + pub cleanup: CleanupDeclarationsDto, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ProtectedRuleDto { + pub path: String, + pub protection_mode: ProtectionModeDto, + pub reason: String, + pub source: String, + pub matcher: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CleanupSnapshot { + pub workspace_dir: String, + pub aindex_dir: Option, + #[serde(default)] + pub project_roots: Vec, + #[serde(default)] + pub protected_rules: Vec, + #[serde(default)] + pub plugin_snapshots: Vec, + pub empty_dir_exclude_globs: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ProtectedPathViolationDto { + pub target_path: String, + pub protected_path: String, + pub protection_mode: ProtectionModeDto, + pub reason: String, + pub source: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CleanupProtectionConflictDto { + pub output_path: String, + pub output_plugin: String, + pub protected_path: String, + pub protection_mode: ProtectionModeDto, + pub protected_by: String, + pub reason: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CleanupPlan { + pub files_to_delete: Vec, + pub dirs_to_delete: Vec, + pub empty_dirs_to_delete: Vec, + pub violations: Vec, + pub conflicts: Vec, + pub excluded_scan_globs: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CleanupErrorDto { + pub path: String, + pub kind: CleanupErrorKindDto, + pub error: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CleanupExecutionResultDto { + pub deleted_files: usize, + pub deleted_dirs: usize, + pub errors: Vec, + pub violations: Vec, + pub conflicts: Vec, + pub files_to_delete: Vec, + pub dirs_to_delete: Vec, + pub empty_dirs_to_delete: Vec, + pub excluded_scan_globs: Vec, +} diff --git a/sdk/src/domain/config/mod.rs b/sdk/src/domain/config/mod.rs index e0152344..269ee8d8 100644 --- a/sdk/src/domain/config/mod.rs +++ b/sdk/src/domain/config/mod.rs @@ -268,7 +268,9 @@ pub struct RuntimeEnvironmentContext { } fn home_dir() -> Option { - dirs::home_dir() + std::env::var_os("HOME") + .map(PathBuf::from) + .or_else(|| dirs::home_dir()) } fn normalize_posix_like_path(raw_path: &str) -> String { @@ -666,7 +668,13 @@ pub fn resolve_workspace_aindex_source_series_dir( } /// Get the global config file path: `~/.aindex/.tnmsc.json` +/// +/// Override via `TNMSC_CONFIG_PATH` environment variable. pub fn get_global_config_path() -> PathBuf { + if let Ok(override_path) = std::env::var("TNMSC_CONFIG_PATH") { + return PathBuf::from(override_path); + } + let runtime_environment = resolve_runtime_environment(); if let Some(selected_path) = runtime_environment.selected_global_config_path { diff --git a/sdk/src/domain/mod.rs b/sdk/src/domain/mod.rs index cc7018c4..6f46f85d 100644 --- a/sdk/src/domain/mod.rs +++ b/sdk/src/domain/mod.rs @@ -1,10 +1,17 @@ pub mod base_output_plans; +pub mod cleanup; pub mod config; +pub mod output_context; pub mod output_plans; pub mod plugin_shared; pub use base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPlansDto}; +pub use cleanup::{ + CleanupDeclarationsDto, CleanupPlan, CleanupSnapshot, CleanupTargetDto, CleanupTargetKindDto, + ProtectionModeDto, +}; pub use config::{ConfigLoader, MergedConfigResult, PluginsConfig, UserConfigFile}; +pub use output_context::OutputContext; pub use plugin_shared::{ AIAgentIgnoreConfigFile, FastCommandPrompt, GlobalMemoryPrompt, IDEKind, NamingCaseKind, PluginKind, Project, ProjectIDEConfigFile, PromptKind, ReadmePrompt, RelativePath, RulePrompt, diff --git a/sdk/src/domain/output_context.rs b/sdk/src/domain/output_context.rs new file mode 100644 index 00000000..7bd235e3 --- /dev/null +++ b/sdk/src/domain/output_context.rs @@ -0,0 +1,53 @@ +use serde::{Deserialize, Serialize}; + +use crate::domain::plugin_shared::{ + AIAgentIgnoreConfigFile, FastCommandPrompt, GlobalMemoryPrompt, ProjectIDEConfigFile, + ReadmePrompt, RulePrompt, SkillPrompt, SubAgentPrompt, Workspace, +}; + +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct OutputContext { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub workspace: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub vscode_config_files: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub zed_config_files: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub jetbrains_config_files: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub editor_config_files: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub fast_commands: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub sub_agents: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub skills: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub rules: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub global_memory: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub global_git_ignore: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub shadow_git_exclude: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub shadow_source_project_dir: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub readme_prompts: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub ai_agent_ignore_config_files: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub registered_output_plugins: Option>, +} + +impl OutputContext { + pub fn from_json(json: &str) -> Result { + serde_json::from_str(json) + } + + pub fn to_json(&self) -> Result { + serde_json::to_string(self) + } +} diff --git a/sdk/src/domain/output_plans/claude_code_output_plan.rs b/sdk/src/domain/output_plans/claude_code_output_plan.rs index 41722c81..050dd0f9 100644 --- a/sdk/src/domain/output_plans/claude_code_output_plan.rs +++ b/sdk/src/domain/output_plans/claude_code_output_plan.rs @@ -3,11 +3,11 @@ use std::path::PathBuf; use serde_json::Value; use crate::CliError; -use crate::context::OutputContext; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPluginPlanDto}; +use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; use crate::domain::config; +use crate::domain::output_context::OutputContext; use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; -use crate::policy::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; const CLAUDE_CODE_PLUGIN_NAME: &str = "ClaudeCodeCLIOutputAdaptor"; const CLAUDE_CODE_MEMORY_FILE: &str = "CLAUDE.md"; diff --git a/sdk/src/domain/output_plans/codex_output_plan.rs b/sdk/src/domain/output_plans/codex_output_plan.rs index 6fd73388..68eb89a0 100644 --- a/sdk/src/domain/output_plans/codex_output_plan.rs +++ b/sdk/src/domain/output_plans/codex_output_plan.rs @@ -18,11 +18,11 @@ use std::path::PathBuf; use crate::CliError; -use crate::context::OutputContext; +use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPluginPlanDto}; use crate::domain::config; use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; -use crate::policy::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; +use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; const CODEX_PLUGIN_NAME: &str = "CodexCLIOutputAdaptor"; const CODEX_INSTRUCTIONS_FILE: &str = "AGENTS.md"; @@ -254,7 +254,7 @@ fn build_agent_toml_content(agent: &crate::domain::plugin_shared::SubAgentPrompt } fn build_command_content(command: &crate::domain::plugin_shared::FastCommandPrompt) -> String { - let mut metadata = if let Some(ref yaml_fm) = command.yaml_front_matter { + let metadata = if let Some(ref yaml_fm) = command.yaml_front_matter { match serde_json::to_value(yaml_fm) { Ok(serde_json::Value::Object(map)) => map, _ => serde_json::Map::new(), @@ -541,13 +541,6 @@ fn get_project_output_projects(workspace: &Workspace) -> Vec<&Project> { projects } -fn get_project_prompt_output_projects(workspace: &Workspace) -> Vec<&Project> { - get_project_output_projects(workspace) - .into_iter() - .filter(|p| p.is_prompt_source_project != Some(true)) - .collect() -} - fn resolve_project_root_dir(workspace: &Workspace, project: &Project) -> Option { if project.is_workspace_root_project == Some(true) { return Some(PathBuf::from(&workspace.directory.path)); diff --git a/sdk/src/domain/output_plans/cursor_output_plan.rs b/sdk/src/domain/output_plans/cursor_output_plan.rs index 79a334f3..8edbfb6e 100644 --- a/sdk/src/domain/output_plans/cursor_output_plan.rs +++ b/sdk/src/domain/output_plans/cursor_output_plan.rs @@ -1,10 +1,10 @@ use std::path::PathBuf; use crate::CliError; -use crate::context::OutputContext; +use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPluginPlanDto}; use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; -use crate::policy::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; +use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; const CURSOR_PLUGIN_NAME: &str = "CursorOutputAdaptor"; const CURSOR_MEMORY_FILE: &str = ".cursorrules"; diff --git a/sdk/src/domain/output_plans/droid_output_plan.rs b/sdk/src/domain/output_plans/droid_output_plan.rs index c91869a2..c0220f13 100644 --- a/sdk/src/domain/output_plans/droid_output_plan.rs +++ b/sdk/src/domain/output_plans/droid_output_plan.rs @@ -5,13 +5,13 @@ use serde::{Deserialize, Serialize}; use serde_json::{Map, Value}; use crate::CliError; -use crate::context::OutputContext; +use crate::domain::output_context::OutputContext; use crate::domain::config; use crate::domain::plugin_shared::{ FastCommandPrompt, Project, RelativePath, RuleScope, SkillPrompt, SkillResourceEncoding, Workspace, }; -use crate::policy::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; +use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; const DROID_PLUGIN_NAME: &str = "DroidCLIOutputAdaptor"; const DROID_MEMORY_FILE: &str = "AGENTS.md"; @@ -991,6 +991,7 @@ mod tests { .find(|entry| { entry .path + .replace('\\', "/") .ends_with("project-a/.factory/skills/ship/SKILL.md") }) .unwrap(); @@ -1000,6 +1001,7 @@ mod tests { .find(|entry| { entry .path + .replace('\\', "/") .ends_with("project-a/.factory/skills/ship/assets/blob.bin") }) .unwrap(); diff --git a/sdk/src/domain/output_plans/gemini_output_plan.rs b/sdk/src/domain/output_plans/gemini_output_plan.rs index f970fd36..a3ad9ea0 100644 --- a/sdk/src/domain/output_plans/gemini_output_plan.rs +++ b/sdk/src/domain/output_plans/gemini_output_plan.rs @@ -2,11 +2,11 @@ use std::collections::HashSet; use std::path::PathBuf; use crate::CliError; -use crate::context::OutputContext; +use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPluginPlanDto}; use crate::domain::config; use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; -use crate::policy::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; +use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; const GEMINI_PLUGIN_NAME: &str = "GeminiCLIOutputAdaptor"; const GEMINI_MEMORY_FILE: &str = "GEMINI.md"; diff --git a/sdk/src/domain/output_plans/generic_skills_output_plan.rs b/sdk/src/domain/output_plans/generic_skills_output_plan.rs index ff62d3cd..7210563a 100644 --- a/sdk/src/domain/output_plans/generic_skills_output_plan.rs +++ b/sdk/src/domain/output_plans/generic_skills_output_plan.rs @@ -1,8 +1,8 @@ use crate::CliError; -use crate::context::OutputContext; +use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::BaseOutputPluginPlanDto; use crate::domain::plugin_shared::Workspace; -use crate::policy::cleanup::CleanupDeclarationsDto; +use crate::domain::cleanup::CleanupDeclarationsDto; const GENERIC_SKILLS_PLUGIN_NAME: &str = "GenericSkillsOutputAdaptor"; diff --git a/sdk/src/domain/output_plans/jetbrains_ai_assistant_codex_output_plan.rs b/sdk/src/domain/output_plans/jetbrains_ai_assistant_codex_output_plan.rs index fd13504b..24d97286 100644 --- a/sdk/src/domain/output_plans/jetbrains_ai_assistant_codex_output_plan.rs +++ b/sdk/src/domain/output_plans/jetbrains_ai_assistant_codex_output_plan.rs @@ -1,8 +1,8 @@ use crate::CliError; -use crate::context::OutputContext; use crate::domain::base_output_plans::BaseOutputPluginPlanDto; +use crate::domain::cleanup::CleanupDeclarationsDto; +use crate::domain::output_context::OutputContext; use crate::domain::plugin_shared::Workspace; -use crate::policy::cleanup::CleanupDeclarationsDto; const JB_PLUGIN_NAME: &str = "JetBrainsAIAssistantCodexOutputAdaptor"; diff --git a/sdk/src/domain/output_plans/kiro_output_plan.rs b/sdk/src/domain/output_plans/kiro_output_plan.rs index 4cd62e11..472b5095 100644 --- a/sdk/src/domain/output_plans/kiro_output_plan.rs +++ b/sdk/src/domain/output_plans/kiro_output_plan.rs @@ -1,10 +1,10 @@ use std::path::PathBuf; use crate::CliError; -use crate::context::OutputContext; +use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::BaseOutputPluginPlanDto; use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; -use crate::policy::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; +use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; const KIRO_PLUGIN_NAME: &str = "KiroCLIOutputAdaptor"; const PROJECT_SCOPE: &str = "project"; diff --git a/sdk/src/domain/output_plans/opencode_output_plan.rs b/sdk/src/domain/output_plans/opencode_output_plan.rs index 07ccb0ce..6bd98f61 100644 --- a/sdk/src/domain/output_plans/opencode_output_plan.rs +++ b/sdk/src/domain/output_plans/opencode_output_plan.rs @@ -3,11 +3,11 @@ use std::path::PathBuf; use serde_json::Value; use crate::CliError; -use crate::context::OutputContext; +use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPluginPlanDto}; use crate::domain::config; use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; -use crate::policy::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; +use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; const OPENCODE_PLUGIN_NAME: &str = "OpencodeCLIOutputAdaptor"; const OPENCODE_MEMORY_FILE: &str = "AGENTS.md"; diff --git a/sdk/src/domain/output_plans/qoder_output_plan.rs b/sdk/src/domain/output_plans/qoder_output_plan.rs index ede69984..81fdb322 100644 --- a/sdk/src/domain/output_plans/qoder_output_plan.rs +++ b/sdk/src/domain/output_plans/qoder_output_plan.rs @@ -1,8 +1,8 @@ use crate::CliError; -use crate::context::OutputContext; +use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::BaseOutputPluginPlanDto; use crate::domain::plugin_shared::Workspace; -use crate::policy::cleanup::CleanupDeclarationsDto; +use crate::domain::cleanup::CleanupDeclarationsDto; const QODER_PLUGIN_NAME: &str = "QoderIDEPluginOutputAdaptor"; diff --git a/sdk/src/domain/output_plans/trae_output_plan.rs b/sdk/src/domain/output_plans/trae_output_plan.rs index 68948637..6c4c9f42 100644 --- a/sdk/src/domain/output_plans/trae_output_plan.rs +++ b/sdk/src/domain/output_plans/trae_output_plan.rs @@ -1,10 +1,10 @@ use std::path::PathBuf; use crate::CliError; -use crate::context::OutputContext; +use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPluginPlanDto}; use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; -use crate::policy::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; +use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; const TRAE_PLUGIN_NAME: &str = "TraeOutputAdaptor"; const TRAE_STEERING_FILE: &str = "GLOBAL.md"; diff --git a/sdk/src/domain/output_plans/warp_output_plan.rs b/sdk/src/domain/output_plans/warp_output_plan.rs index 82e687c5..5a604ae9 100644 --- a/sdk/src/domain/output_plans/warp_output_plan.rs +++ b/sdk/src/domain/output_plans/warp_output_plan.rs @@ -1,10 +1,10 @@ use std::path::PathBuf; use crate::CliError; -use crate::context::OutputContext; +use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPluginPlanDto}; use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; -use crate::policy::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; +use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; const WARP_PLUGIN_NAME: &str = "WarpIDEOutputAdaptor"; const WARP_MEMORY_FILE: &str = "WARP.md"; diff --git a/sdk/src/domain/output_plans/windsurf_output_plan.rs b/sdk/src/domain/output_plans/windsurf_output_plan.rs index 8350b7d9..c97b302c 100644 --- a/sdk/src/domain/output_plans/windsurf_output_plan.rs +++ b/sdk/src/domain/output_plans/windsurf_output_plan.rs @@ -1,10 +1,10 @@ use std::path::PathBuf; use crate::CliError; -use crate::context::OutputContext; +use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPluginPlanDto}; use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; -use crate::policy::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; +use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; const WINDSURF_PLUGIN_NAME: &str = "WindsurfOutputAdaptor"; const WINDSURF_MEMORY_FILE: &str = ".windsurfrules"; diff --git a/sdk/src/infra/git_fs.rs b/sdk/src/infra/git_fs.rs new file mode 100644 index 00000000..9197d21b --- /dev/null +++ b/sdk/src/infra/git_fs.rs @@ -0,0 +1,196 @@ +use std::fs; +use std::path::{Path, PathBuf}; + +pub fn resolve_git_info_dir(project_dir: &Path) -> Option { + let dot_git = project_dir.join(".git"); + if !dot_git.exists() { + return None; + } + + let metadata = fs::symlink_metadata(&dot_git).ok()?; + if metadata.is_dir() { + return Some(dot_git.join("info")); + } + + if metadata.is_file() { + let content = fs::read_to_string(&dot_git).ok()?; + for line in content.lines() { + let line = line.trim(); + if let Some(gitdir) = line.strip_prefix("gitdir:") { + let gitdir = Path::new(gitdir.trim()); + let resolved = if gitdir.is_absolute() { + gitdir.to_path_buf() + } else { + project_dir.join(gitdir) + }; + return Some(resolved.join("info")); + } + } + } + + None +} + +const SKIP_DIRS: &[&str] = &["node_modules", ".turbo", "dist", "build", "out", ".cache"]; + +pub fn find_all_git_repos(root_dir: &Path, max_depth: usize) -> Vec { + let mut results = Vec::new(); + + fn walk(dir: &Path, root_dir: &Path, depth: usize, max_depth: usize, results: &mut Vec) { + if depth > max_depth { + return; + } + + let entries = match fs::read_dir(dir) { + Ok(e) => e, + Err(_) => return, + }; + + let mut has_git = false; + let mut subdirs = Vec::new(); + + for entry in entries.flatten() { + let name = entry.file_name(); + let name_str = name.to_string_lossy(); + if name_str == ".git" { + has_git = true; + continue; + } + if let Ok(ft) = entry.file_type() + && ft.is_dir() + && !SKIP_DIRS.contains(&name_str.as_ref()) + { + subdirs.push(entry.path()); + } + } + + if has_git && dir != root_dir { + results.push(dir.to_path_buf()); + } + + for subdir in subdirs { + walk(&subdir, root_dir, depth + 1, max_depth, results); + } + } + + walk(root_dir, root_dir, 0, max_depth, &mut results); + results +} + +#[cfg(test)] +mod tests { + use super::*; + use std::fs; + use tempfile::TempDir; + + #[test] + fn test_resolve_git_info_dir_for_regular_repo() { + let tmp = TempDir::new().unwrap(); + let dot_git = tmp.path().join(".git"); + fs::create_dir_all(&dot_git).unwrap(); + + let result = resolve_git_info_dir(tmp.path()); + assert_eq!(result, Some(dot_git.join("info"))); + } + + #[test] + fn test_resolve_git_info_dir_for_gitlink() { + let tmp = TempDir::new().unwrap(); + let dot_git = tmp.path().join(".git"); + fs::write(&dot_git, "gitdir: /absolute/path/to/git\n").unwrap(); + + let result = resolve_git_info_dir(tmp.path()); + assert!(result.is_some()); + let result_str = result.as_ref().unwrap().to_string_lossy().replace('\\', "/"); + // On Windows, absolute paths starting with / get a drive letter prefix + let result_normalized = result_str + .strip_prefix("C:") + .or_else(|| result_str.strip_prefix("c:")) + .unwrap_or(&result_str); + assert_eq!(result_normalized, "/absolute/path/to/git/info"); + } + + #[test] + fn test_resolve_git_info_dir_for_relative_gitlink() { + let tmp = TempDir::new().unwrap(); + let dot_git = tmp.path().join(".git"); + fs::write(&dot_git, "gitdir: ../.git/modules/foo\n").unwrap(); + + let result = resolve_git_info_dir(tmp.path()); + assert_eq!( + result, + Some( + tmp + .path() + .join("..") + .join(".git") + .join("modules") + .join("foo") + .join("info") + .canonicalize() + .unwrap_or_else(|_| tmp + .path() + .join("..") + .join(".git") + .join("modules") + .join("foo") + .join("info")) + ) + ); + } + + #[test] + fn test_resolve_git_info_dir_missing() { + let tmp = TempDir::new().unwrap(); + assert_eq!(resolve_git_info_dir(tmp.path()), None); + } + + #[test] + fn test_find_all_git_repos_finds_nested() { + let tmp = TempDir::new().unwrap(); + let root = tmp.path(); + let child = root.join("packages").join("app"); + fs::create_dir_all(root.join(".git")).unwrap(); + fs::create_dir_all(child.join(".git")).unwrap(); + + let result = find_all_git_repos(root, 5); + assert_eq!(result.len(), 1); + assert_eq!(result[0], child); + } + + #[test] + fn test_find_all_git_repos_excludes_root() { + let tmp = TempDir::new().unwrap(); + let root = tmp.path(); + fs::create_dir_all(root.join(".git")).unwrap(); + + let result = find_all_git_repos(root, 5); + assert!(result.is_empty()); + } + + #[test] + fn test_find_all_git_repos_skips_skip_dirs() { + let tmp = TempDir::new().unwrap(); + let root = tmp.path(); + let node_modules = root.join("node_modules").join("some-lib"); + fs::create_dir_all(node_modules.join(".git")).unwrap(); + + let result = find_all_git_repos(root, 5); + assert!(result.is_empty()); + } + + #[test] + fn test_find_all_git_repos_respects_max_depth() { + let tmp = TempDir::new().unwrap(); + let root = tmp.path(); + let deep = root.join("a").join("b").join("c").join("d"); + fs::create_dir_all(deep.join(".git")).unwrap(); + + let result = find_all_git_repos(root, 3); + assert!(result.is_empty()); + + let result = find_all_git_repos(root, 4); + assert_eq!(result.len(), 1); + assert_eq!(result[0], deep); + } +} diff --git a/sdk/src/infra/logger.rs b/sdk/src/infra/logger.rs deleted file mode 100644 index 5a2bcd44..00000000 --- a/sdk/src/infra/logger.rs +++ /dev/null @@ -1,1175 +0,0 @@ -#![deny(clippy::all)] - -//! AI-friendly Markdown logger with minimal terminal noise. -//! -//! Output format: -//! - Messages: `### Title` with optional Markdown bullet metadata -//! - Diagnostics: `### Title` followed by concise action-focused sections - -use serde::{Deserialize, Serialize}; -use serde_json::{Map, Value}; -use std::io::{BufWriter, Write}; -use std::sync::atomic::{AtomicU8, Ordering}; -use std::sync::mpsc::{self, Receiver, Sender}; -use std::sync::{LazyLock, Mutex}; -use std::thread; - -// --------------------------------------------------------------------------- -// Log levels -// --------------------------------------------------------------------------- - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize)] -#[serde(rename_all = "lowercase")] -pub enum LogLevel { - Silent, - Fatal, - Error, - Warn, - Info, - Debug, - Trace, -} - -impl LogLevel { - fn priority(self) -> u8 { - match self { - Self::Silent => 0, - Self::Fatal => 1, - Self::Error => 2, - Self::Warn => 3, - Self::Info => 4, - Self::Debug => 5, - Self::Trace => 6, - } - } - - fn as_str(self) -> &'static str { - match self { - Self::Silent => "silent", - Self::Fatal => "fatal", - Self::Error => "error", - Self::Warn => "warn", - Self::Info => "info", - Self::Debug => "debug", - Self::Trace => "trace", - } - } - - pub fn from_str_loose(s: &str) -> Option { - match s.to_ascii_lowercase().as_str() { - "silent" => Some(Self::Silent), - "fatal" => Some(Self::Fatal), - "error" => Some(Self::Error), - "warn" => Some(Self::Warn), - "info" => Some(Self::Info), - "debug" => Some(Self::Debug), - "trace" => Some(Self::Trace), - _ => None, - } - } -} - -// --------------------------------------------------------------------------- -// LogRecord (the structured return value) -// --------------------------------------------------------------------------- - -#[derive(Debug, Clone, Serialize)] -pub struct LogRecord { - #[serde(rename = "$")] - pub meta: (String, String, String), - #[serde(rename = "_")] - pub payload: Value, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct LoggerDiagnosticInput { - pub code: String, - pub title: String, - pub root_cause: Vec, - #[serde(skip_serializing_if = "Option::is_none")] - pub exact_fix: Option>, - #[serde(skip_serializing_if = "Option::is_none")] - pub possible_fixes: Option>>, - #[serde(skip_serializing_if = "Option::is_none")] - pub details: Option>, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct LoggerDiagnosticRecord { - pub code: String, - pub title: String, - pub root_cause: Vec, - #[serde(skip_serializing_if = "Option::is_none")] - pub exact_fix: Option>, - #[serde(skip_serializing_if = "Option::is_none")] - pub possible_fixes: Option>>, - #[serde(skip_serializing_if = "Option::is_none")] - pub details: Option>, - pub level: String, - pub namespace: String, - pub copy_text: Vec, -} - -// --------------------------------------------------------------------------- -// Global log level -// --------------------------------------------------------------------------- - -static GLOBAL_LOG_LEVEL: AtomicU8 = AtomicU8::new(255); // 255 = unset -static BUFFERED_DIAGNOSTICS: LazyLock>> = - LazyLock::new(|| Mutex::new(Vec::new())); -static OUTPUT_SINK: LazyLock> = LazyLock::new(spawn_output_sink); - -enum OutputCommand { - Write { use_stderr: bool, output: String }, - Flush { ack: Sender<()> }, -} - -/// Set the global log level for all loggers. -pub fn set_global_log_level(level: LogLevel) { - GLOBAL_LOG_LEVEL.store(level.priority(), Ordering::Relaxed); -} - -/// Get the current global log level. -pub fn get_global_log_level() -> Option { - let v = GLOBAL_LOG_LEVEL.load(Ordering::Relaxed); - if v == 255 { None } else { priority_to_level(v) } -} - -pub fn clear_buffered_diagnostics() { - if let Ok(mut buffered) = BUFFERED_DIAGNOSTICS.lock() { - buffered.clear(); - } -} - -pub fn drain_buffered_diagnostics() -> Vec { - match BUFFERED_DIAGNOSTICS.lock() { - Ok(mut buffered) => std::mem::take(&mut *buffered), - Err(_) => Vec::new(), - } -} - -pub fn flush_output() { - let (ack_tx, ack_rx) = mpsc::channel(); - if OUTPUT_SINK - .send(OutputCommand::Flush { ack: ack_tx }) - .is_ok() - { - let _ = ack_rx.recv(); - } -} - -fn priority_to_level(p: u8) -> Option { - match p { - 0 => Some(LogLevel::Silent), - 1 => Some(LogLevel::Fatal), - 2 => Some(LogLevel::Error), - 3 => Some(LogLevel::Warn), - 4 => Some(LogLevel::Info), - 5 => Some(LogLevel::Debug), - 6 => Some(LogLevel::Trace), - _ => None, - } -} - -fn resolve_log_level(explicit: Option) -> LogLevel { - if let Some(l) = explicit { - return l; - } - if let Some(l) = get_global_log_level() { - return l; - } - if let Ok(env_val) = std::env::var("LOG_LEVEL") - && let Some(l) = LogLevel::from_str_loose(&env_val) - { - return l; - } - LogLevel::Info -} - -// --------------------------------------------------------------------------- -// JSON formatting -// --------------------------------------------------------------------------- - -fn indent(level: usize) -> String { - " ".repeat(level) -} - -fn to_plain_json(value: &Value) -> String { - serde_json::to_string(value) - .unwrap_or_else(|_| r#"{"error":"failed to serialize output"}"#.to_string()) -} - -// --------------------------------------------------------------------------- -// Diagnostics -// --------------------------------------------------------------------------- - -fn validate_non_empty_lines(field_name: &str, lines: &[String], errors: &mut Vec) { - if lines.is_empty() { - errors.push(format!("{field_name} must contain at least one line")); - } -} - -fn validate_diagnostic_input(input: &LoggerDiagnosticInput) -> Result<(), Vec> { - let mut errors: Vec = Vec::new(); - - if input.code.trim().is_empty() { - errors.push("code must be a non-empty string".to_string()); - } - if input.title.trim().is_empty() { - errors.push("title must be a non-empty string".to_string()); - } - validate_non_empty_lines("rootCause", &input.root_cause, &mut errors); - - if let Some(lines) = &input.exact_fix { - validate_non_empty_lines("exactFix", lines, &mut errors); - } - - if let Some(fixes) = &input.possible_fixes { - if fixes.is_empty() { - errors.push("possibleFixes must contain at least one fix when provided".to_string()); - } - for (index, lines) in fixes.iter().enumerate() { - if lines.is_empty() { - errors.push(format!( - "possibleFixes[{index}] must contain at least one line" - )); - } - } - } - - if errors.is_empty() { - Ok(()) - } else { - Err(errors) - } -} - -fn build_payload(message: &Value, meta: Option<&Value>) -> Value { - let Some(meta_val) = meta else { - return message.clone(); - }; - - if meta_val.as_object().is_some_and(|object| object.is_empty()) { - return message.clone(); - } - - let message_str = match message { - Value::String(s) => s.as_str(), - _ => "", - }; - - if message_str.is_empty() { - return meta_val.clone(); - } - - if meta_val.is_object() { - let mut map = Map::new(); - map.insert(message_str.to_string(), meta_val.clone()); - return Value::Object(map); - } - - let mut map = Map::new(); - map.insert( - "message".to_string(), - Value::String(message_str.to_string()), - ); - map.insert("meta".to_string(), meta_val.clone()); - Value::Object(map) -} - -fn append_section( - lines: &mut Vec, - title: &str, - entries: &[String], - numbered: Option, -) { - if entries.is_empty() { - return; - } - - if !lines.is_empty() { - lines.push(String::new()); - } - - if !title.is_empty() { - lines.push(title.to_string()); - } - - match numbered { - Some(number) => { - let mut iter = entries.iter(); - if let Some(first) = iter.next() { - lines.push(format!(" {number}. {first}")); - } - for entry in iter { - lines.push(format!(" {entry}")); - } - } - None => { - for entry in entries { - lines.push(format!(" - {entry}")); - } - } - } -} - -fn scalar_to_markdown_text(value: &Value) -> String { - match value { - Value::Null => "null".to_string(), - Value::Bool(boolean) => boolean.to_string(), - Value::Number(number) => number.to_string(), - Value::String(text) => text.clone(), - Value::Array(_) | Value::Object(_) => to_plain_json(value), - } -} - -fn append_markdown_value( - lines: &mut Vec, - label: Option<&str>, - value: &Value, - depth: usize, -) { - let prefix = indent(depth); - let bullet = format!("{prefix}- "); - - match value { - Value::Null | Value::Bool(_) | Value::Number(_) | Value::String(_) => match label { - Some(name) => { - lines.push(format!( - "{bullet}{name}: {}", - scalar_to_markdown_text(value) - )); - } - None => { - lines.push(format!("{bullet}{}", scalar_to_markdown_text(value))); - } - }, - Value::Array(items) => { - if items.is_empty() { - match label { - Some(name) => { - lines.push(format!("{bullet}{name}: []")); - } - None => { - lines.push(format!("{bullet}[]")); - } - } - return; - } - - if let Some(name) = label { - lines.push(format!("{bullet}{name}:")); - for item in items { - append_markdown_value(lines, None, item, depth + 1); - } - return; - } - - for item in items { - append_markdown_value(lines, None, item, depth); - } - } - Value::Object(map) => { - if map.is_empty() { - match label { - Some(name) => { - lines.push(format!("{bullet}{name}: {{}}")); - } - None => { - lines.push(format!("{bullet}{{}}")); - } - } - return; - } - - if let Some(name) = label { - lines.push(format!("{bullet}{name}:")); - for (key, nested) in map { - append_markdown_value(lines, Some(key), nested, depth + 1); - } - return; - } - - for (key, nested) in map { - append_markdown_value(lines, Some(key), nested, depth); - } - } - } -} - -fn value_to_markdown_lines(value: &Value) -> Vec { - let mut lines = Vec::new(); - append_markdown_value(&mut lines, None, value, 0); - lines -} - -fn extract_message_and_meta_lines(payload: &Value) -> (Option, Vec) { - match payload { - Value::String(text) => (Some(text.clone()), Vec::new()), - Value::Object(map) => { - if let Some(Value::String(message)) = map.get("message") { - let mut remainder = map.clone(); - remainder.remove("message"); - let lines = if remainder.is_empty() { - Vec::new() - } else { - value_to_markdown_lines(&Value::Object(remainder)) - }; - return (Some(message.clone()), lines); - } - - if map.len() == 1 - && let Some((message, nested)) = map.iter().next() - { - match nested { - Value::Null | Value::Bool(_) | Value::Number(_) | Value::String(_) => { - return ( - Some(format!("{message}: {}", scalar_to_markdown_text(nested))), - Vec::new(), - ); - } - Value::Array(items) if !items.is_empty() => { - return (Some(message.clone()), value_to_markdown_lines(nested)); - } - Value::Object(object) if !object.is_empty() => { - return (Some(message.clone()), value_to_markdown_lines(nested)); - } - _ => {} - } - } - - (None, value_to_markdown_lines(payload)) - } - _ => (None, value_to_markdown_lines(payload)), - } -} - -fn split_preserved_lines(text: &str) -> Vec { - text - .split('\n') - .map(|line| line.trim_end_matches('\r').to_string()) - .collect() -} - -fn render_markdown_heading(title: &str) -> String { - format!("### {title}") -} - -fn split_message_title(message: &str) -> (String, Vec) { - let mut lines = split_preserved_lines(message).into_iter(); - let title = lines - .find(|line| !line.trim().is_empty()) - .unwrap_or_else(|| "Details".to_string()); - let body = lines.collect(); - (title, body) -} - -fn render_message_output(_level: LogLevel, _namespace: &str, payload: &Value) -> String { - let (message, meta_lines) = extract_message_and_meta_lines(payload); - let mut lines = Vec::new(); - - match message { - Some(message) if message.contains('\n') => { - let (title, body_lines) = split_message_title(&message); - lines.push(render_markdown_heading(&title)); - if !body_lines.is_empty() { - lines.push(String::new()); - lines.extend(body_lines); - } - } - Some(message) => lines.push(render_markdown_heading(&message)), - None => { - lines.push(render_markdown_heading("Details")); - } - } - - if !meta_lines.is_empty() { - lines.push(String::new()); - lines.extend(meta_lines); - } - - lines.join("\n") -} - -fn render_diagnostic_output(_level: LogLevel, record: &LoggerDiagnosticRecord) -> String { - let mut lines = vec![render_markdown_heading(&record.title)]; - - if !record.root_cause.is_empty() { - append_section(&mut lines, "**What happened**", &record.root_cause, None); - } - - if let Some(exact_fix) = &record.exact_fix { - append_section(&mut lines, "**Do this**", exact_fix, None); - } - - if let Some(possible_fixes) = &record.possible_fixes - && !possible_fixes.is_empty() - { - if !lines.is_empty() { - lines.push(String::new()); - } - lines.push("**Try this if needed**".to_string()); - for (index, fix) in possible_fixes.iter().enumerate() { - let mut iter = fix.iter(); - if let Some(first) = iter.next() { - lines.push(format!(" {}. {}", index + 1, first)); - } - for entry in iter { - lines.push(format!(" {entry}")); - } - } - } - - if let Some(details) = &record.details - && !details.is_empty() - { - if !lines.is_empty() { - lines.push(String::new()); - } - lines.push("**Context**".to_string()); - let mut detail_lines = value_to_markdown_lines(&Value::Object(details.clone())); - for line in &mut detail_lines { - line.insert_str(0, " "); - } - lines.extend(detail_lines); - } - - lines.join("\n") -} - -fn build_copy_text(record: &LoggerDiagnosticRecord) -> Vec { - let mut lines = vec![record.title.clone()]; - - append_section(&mut lines, "**What happened**", &record.root_cause, None); - - if let Some(exact_fix) = &record.exact_fix { - append_section(&mut lines, "**Do this**", exact_fix, None); - } - - if let Some(possible_fixes) = &record.possible_fixes - && !possible_fixes.is_empty() - { - if !lines.is_empty() { - lines.push(String::new()); - } - lines.push("**Try this if needed**".to_string()); - for (index, fix) in possible_fixes.iter().enumerate() { - let mut iter = fix.iter(); - if let Some(first) = iter.next() { - lines.push(format!(" {}. {}", index + 1, first)); - } - for entry in iter { - lines.push(format!(" {entry}")); - } - } - } - - if let Some(details) = &record.details - && !details.is_empty() - { - if !lines.is_empty() { - lines.push(String::new()); - } - lines.push("**Context**".to_string()); - let mut detail_lines = value_to_markdown_lines(&Value::Object(details.clone())); - for line in &mut detail_lines { - line.insert_str(0, " "); - } - lines.extend(detail_lines); - } - - lines -} - -fn diagnostic_record_from_input( - namespace: &str, - level: LogLevel, - input: LoggerDiagnosticInput, -) -> LoggerDiagnosticRecord { - let mut record = LoggerDiagnosticRecord { - code: input.code.trim().to_string(), - title: input.title.trim().to_string(), - root_cause: input.root_cause, - exact_fix: input.exact_fix, - possible_fixes: input.possible_fixes, - details: input.details, - level: level.as_str().to_string(), - namespace: namespace.to_string(), - copy_text: Vec::new(), - }; - record.copy_text = build_copy_text(&record); - record -} - -fn invalid_diagnostic_record( - namespace: &str, - level: LogLevel, - raw_payload: Value, - validation_errors: &[String], -) -> LoggerDiagnosticRecord { - let mut details = Map::new(); - details.insert("rawPayload".to_string(), raw_payload); - details.insert( - "validationErrors".to_string(), - Value::Array( - validation_errors - .iter() - .map(|entry| Value::String(entry.clone())) - .collect(), - ), - ); - - let mut record = LoggerDiagnosticRecord { - code: "LOGGER_DIAGNOSTIC_SCHEMA_INVALID".to_string(), - title: "Logger diagnostic payload is invalid".to_string(), - root_cause: vec![ - "The logger received a warn/error/fatal payload that does not match the required diagnostic schema.".to_string(), - format!("Validation issues: {}", validation_errors.join("; ")), - ], - exact_fix: Some(vec![ - "Pass a diagnostic object with non-empty code, title, and rootCause fields.".to_string(), - "Keep exactFix and each possibleFixes entry as non-empty string arrays when they are present.".to_string(), - ]), - possible_fixes: None, - details: Some(details), - level: level.as_str().to_string(), - namespace: namespace.to_string(), - copy_text: Vec::new(), - }; - record.copy_text = build_copy_text(&record); - record -} - -fn parse_diagnostic_input( - namespace: &str, - level: LogLevel, - diagnostic: Value, -) -> LoggerDiagnosticRecord { - let parsed = serde_json::from_value::(diagnostic.clone()); - match parsed { - Ok(input) => match validate_diagnostic_input(&input) { - Ok(()) => diagnostic_record_from_input(namespace, level, input), - Err(validation_errors) => { - invalid_diagnostic_record(namespace, level, diagnostic, &validation_errors) - } - }, - Err(error) => invalid_diagnostic_record( - namespace, - level, - diagnostic, - &[format!("Diagnostic payload could not be parsed: {error}")], - ), - } -} - -fn serialize_payload(value: impl Serialize) -> Value { - serde_json::to_value(value).unwrap_or_else(|error| { - Value::Object(Map::from_iter([ - ( - "code".to_string(), - Value::String("LOGGER_SERIALIZATION_FAILED".to_string()), - ), - ( - "title".to_string(), - Value::String("Logger payload serialization failed".to_string()), - ), - ("error".to_string(), Value::String(error.to_string())), - ])) - }) -} - -fn push_buffered_diagnostic(record: &LoggerDiagnosticRecord) { - if let Ok(mut buffered) = BUFFERED_DIAGNOSTICS.lock() { - buffered.push(record.clone()); - } -} - -fn writes_to_stderr(level: LogLevel) -> bool { - matches!(level, LogLevel::Error | LogLevel::Fatal | LogLevel::Warn) -} - -// --------------------------------------------------------------------------- -// Format and print -// --------------------------------------------------------------------------- - -fn spawn_output_sink() -> Sender { - let (tx, rx) = mpsc::channel(); - thread::Builder::new() - .name("tnmsd-logger-output".to_string()) - .spawn(move || output_worker(rx)) - .expect("failed to spawn tnmsd logger output worker"); - tx -} - -fn output_worker(receiver: Receiver) { - let stdout = std::io::stdout(); - let stderr = std::io::stderr(); - let mut stdout_writer = BufWriter::new(stdout); - let mut stderr_writer = BufWriter::new(stderr); - - while let Ok(command) = receiver.recv() { - match command { - OutputCommand::Write { use_stderr, output } => { - if use_stderr { - let _ = write_output_line(&mut stderr_writer, &output); - } else { - let _ = write_output_line(&mut stdout_writer, &output); - } - } - OutputCommand::Flush { ack } => { - let _ = stdout_writer.flush(); - let _ = stderr_writer.flush(); - let _ = ack.send(()); - } - } - } - - let _ = stdout_writer.flush(); - let _ = stderr_writer.flush(); -} - -fn write_output_line(writer: &mut impl Write, output: &str) -> std::io::Result<()> { - writer.write_all(output.as_bytes())?; - writer.write_all(b"\n")?; - writer.flush() -} - -fn print_output_direct(use_stderr: bool, output: &str) { - if use_stderr { - let mut stderr = std::io::stderr().lock(); - let _ = writeln!(stderr, "{output}"); - let _ = stderr.flush(); - } else { - let mut stdout = std::io::stdout().lock(); - let _ = writeln!(stdout, "{output}"); - let _ = stdout.flush(); - } -} - -fn print_output(level: LogLevel, output: &str) { - let use_stderr = writes_to_stderr(level); - if OUTPUT_SINK - .send(OutputCommand::Write { - use_stderr, - output: output.to_string(), - }) - .is_err() - { - print_output_direct(use_stderr, output); - } -} - -fn emit_message_log_record(level: LogLevel, namespace: &str, payload: Value) -> LogRecord { - let record = LogRecord { - meta: ( - String::new(), - level.as_str().to_string(), - namespace.to_string(), - ), - payload: payload.clone(), - }; - print_output(level, &render_message_output(level, namespace, &payload)); - record -} - -fn emit_diagnostic_log_record(level: LogLevel, record: &LoggerDiagnosticRecord) -> LogRecord { - let payload = serialize_payload(record); - let emitted = LogRecord { - meta: ( - String::new(), - level.as_str().to_string(), - record.namespace.clone(), - ), - payload, - }; - print_output(level, &render_diagnostic_output(level, record)); - emitted -} - -// --------------------------------------------------------------------------- -// Logger -// --------------------------------------------------------------------------- - -pub struct Logger { - namespace: String, - level: LogLevel, -} - -impl Logger { - pub fn error(&self, diagnostic: LoggerDiagnosticInput) -> Option { - self.log_diagnostic(LogLevel::Error, serialize_payload(diagnostic)) - } - - pub fn warn(&self, diagnostic: LoggerDiagnosticInput) -> Option { - self.log_diagnostic(LogLevel::Warn, serialize_payload(diagnostic)) - } - - pub fn info(&self, message: impl Into, meta: Option) -> Option { - self.log_message(LogLevel::Info, message.into(), meta) - } - - pub fn debug(&self, message: impl Into, meta: Option) -> Option { - self.log_message(LogLevel::Debug, message.into(), meta) - } - - pub fn trace(&self, message: impl Into, meta: Option) -> Option { - self.log_message(LogLevel::Trace, message.into(), meta) - } - - pub fn fatal(&self, diagnostic: LoggerDiagnosticInput) -> Option { - self.log_diagnostic(LogLevel::Fatal, serialize_payload(diagnostic)) - } - - fn should_emit(&self, level: LogLevel) -> bool { - level.priority() <= self.level.priority() - } - - fn should_buffer_diagnostic(&self, level: LogLevel) -> bool { - self.should_emit(level) || self.level == LogLevel::Silent - } - - fn log_message(&self, level: LogLevel, message: Value, meta: Option) -> Option { - if level.priority() > self.level.priority() { - return None; - } - let payload = build_payload(&message, meta.as_ref()); - Some(emit_message_log_record(level, &self.namespace, payload)) - } - - fn log_diagnostic(&self, level: LogLevel, diagnostic: Value) -> Option { - let record = parse_diagnostic_input(&self.namespace, level, diagnostic); - - if self.should_buffer_diagnostic(level) { - push_buffered_diagnostic(&record); - } - - if !self.should_emit(level) { - return None; - } - - Some(emit_diagnostic_log_record(level, &record)) - } -} - -/// Create a new logger with the given namespace and optional log level. -pub fn create_logger(namespace: &str, log_level: Option) -> Logger { - Logger { - namespace: namespace.to_string(), - level: resolve_log_level(log_level), - } -} - -// --------------------------------------------------------------------------- -// Convenience macros -// --------------------------------------------------------------------------- - -#[macro_export] -macro_rules! log_info { - ($logger:expr, $msg:expr) => { - $logger.info(serde_json::Value::String($msg.to_string()), None) - }; - ($logger:expr, $msg:expr, $meta:expr) => { - $logger.info(serde_json::Value::String($msg.to_string()), Some($meta)) - }; -} - -#[macro_export] -macro_rules! log_error { - ($logger:expr, $diagnostic:expr) => { - $logger.error($diagnostic) - }; -} - -#[macro_export] -macro_rules! log_warn { - ($logger:expr, $diagnostic:expr) => { - $logger.warn($diagnostic) - }; -} - -#[macro_export] -macro_rules! log_debug { - ($logger:expr, $msg:expr) => { - $logger.debug(serde_json::Value::String($msg.to_string()), None) - }; - ($logger:expr, $msg:expr, $meta:expr) => { - $logger.debug(serde_json::Value::String($msg.to_string()), Some($meta)) - }; -} - -#[macro_export] -macro_rules! log_trace { - ($logger:expr, $msg:expr) => { - $logger.trace(serde_json::Value::String($msg.to_string()), None) - }; - ($logger:expr, $msg:expr, $meta:expr) => { - $logger.trace(serde_json::Value::String($msg.to_string()), Some($meta)) - }; -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_log_level_priority() { - assert!(LogLevel::Silent.priority() < LogLevel::Fatal.priority()); - assert!(LogLevel::Fatal.priority() < LogLevel::Error.priority()); - assert!(LogLevel::Error.priority() < LogLevel::Warn.priority()); - assert!(LogLevel::Warn.priority() < LogLevel::Info.priority()); - assert!(LogLevel::Info.priority() < LogLevel::Debug.priority()); - assert!(LogLevel::Debug.priority() < LogLevel::Trace.priority()); - } - - #[test] - fn test_log_level_from_str() { - assert_eq!(LogLevel::from_str_loose("info"), Some(LogLevel::Info)); - assert_eq!(LogLevel::from_str_loose("INFO"), Some(LogLevel::Info)); - assert_eq!(LogLevel::from_str_loose("Debug"), Some(LogLevel::Debug)); - assert_eq!(LogLevel::from_str_loose("unknown"), None); - } - - #[test] - fn test_create_logger_default_level() { - let logger = create_logger("test", None); - assert_eq!(logger.level, LogLevel::Info); - } - - #[test] - fn test_logger_filters_by_level() { - let logger = create_logger("test", Some(LogLevel::Warn)); - assert!( - logger - .log_message(LogLevel::Info, Value::String("hi".into()), None) - .is_none() - ); - assert!( - logger - .log_message(LogLevel::Error, Value::String("err".into()), None) - .is_some() - ); - } - - #[test] - fn test_build_payload_uses_meta_when_message_is_empty() { - let payload = build_payload( - &Value::String(String::new()), - Some(&serde_json::json!([1, 2, 3])), - ); - assert_eq!(payload, serde_json::json!([1, 2, 3])); - } - - #[test] - fn test_build_payload_wraps_non_object_meta_for_named_message() { - let payload = build_payload( - &Value::String("hello".into()), - Some(&serde_json::json!(["x"])), - ); - assert_eq!( - payload, - serde_json::json!({ - "message": "hello", - "meta": ["x"], - }) - ); - } - - #[test] - fn test_global_log_level() { - set_global_log_level(LogLevel::Debug); - assert_eq!(get_global_log_level(), Some(LogLevel::Debug)); - GLOBAL_LOG_LEVEL.store(255, Ordering::Relaxed); - } - - #[test] - fn test_validate_diagnostic_input_rejects_empty_root_cause() { - let diagnostic = LoggerDiagnosticInput { - code: "TEST".to_string(), - title: "Broken diagnostic".to_string(), - root_cause: Vec::new(), - exact_fix: None, - possible_fixes: None, - details: None, - }; - - assert!(validate_diagnostic_input(&diagnostic).is_err()); - } - - #[test] - fn test_build_copy_text_includes_expected_sections() { - let record = diagnostic_record_from_input( - "logger-test", - LogLevel::Error, - LoggerDiagnosticInput { - code: "TEST_ERROR".to_string(), - title: "Example diagnostic".to_string(), - root_cause: vec!["The config file is missing.".to_string()], - exact_fix: Some(vec![ - "Create the config file before running again.".to_string(), - ]), - possible_fixes: Some(vec![vec![ - "Restore the file from version control.".to_string(), - "Re-run the setup command if the file is generated.".to_string(), - ]]), - details: Some(Map::from_iter([( - "path".to_string(), - Value::String("/tmp/example.json".to_string()), - )])), - }, - ); - - assert_eq!(record.copy_text[0], "Example diagnostic"); - assert!(record.copy_text.contains(&"**What happened**".to_string())); - assert!(record.copy_text.contains(&"**Do this**".to_string())); - assert!( - record - .copy_text - .contains(&"**Try this if needed**".to_string()) - ); - assert!(record.copy_text.contains(&"**Context**".to_string())); - } - - #[test] - fn test_render_message_output_formats_markdown() { - let payload = Value::Object(Map::from_iter([( - "message".to_string(), - Value::String("hello".to_string()), - )])); - - let rendered = render_message_output(LogLevel::Info, "logger-test", &payload); - assert_eq!(rendered, "### hello"); - } - - #[test] - fn test_render_message_output_moves_multiline_message_to_block_body() { - let payload = Value::String("line one\nline two".to_string()); - let rendered = render_message_output(LogLevel::Info, "logger-test", &payload); - - assert_eq!(rendered, "### line one\n\nline two"); - } - - #[test] - fn test_render_message_output_renders_nested_payloads() { - let payload = serde_json::json!({ - "started": { - "command": "install", - } - }); - - let rendered = render_message_output(LogLevel::Info, "PluginPipeline", &payload); - assert!(rendered.contains("### started")); - assert!(rendered.contains("- command: install")); - } - - #[test] - fn test_render_diagnostic_output_uses_markdown_sections() { - let record = diagnostic_record_from_input( - "logger-test", - LogLevel::Warn, - LoggerDiagnosticInput { - code: "TEST_WARN".to_string(), - title: "Pretty output".to_string(), - root_cause: vec![ - "The warning must stay readable.".to_string(), - "Each copyText entry should appear on its own line.".to_string(), - ], - exact_fix: Some(vec!["Use pretty JSON for diagnostics.".to_string()]), - possible_fixes: None, - details: Some(Map::from_iter([( - "path".to_string(), - Value::String("C:\\runtime\\plugin".to_string()), - )])), - }, - ); - - let rendered = render_diagnostic_output(LogLevel::Warn, &record); - assert!(rendered.contains("### Pretty output")); - assert!(rendered.contains("**What happened**")); - assert!(rendered.contains(" - The warning must stay readable.")); - assert!(rendered.contains("**Context**")); - assert!(rendered.contains(" - path: C:\\runtime\\plugin")); - } - - #[test] - fn test_build_copy_text_renders_context_without_json_braces() { - let record = diagnostic_record_from_input( - "logger-test", - LogLevel::Warn, - LoggerDiagnosticInput { - code: "TEST_WARN".to_string(), - title: "Context output".to_string(), - root_cause: vec!["Keep context readable.".to_string()], - exact_fix: None, - possible_fixes: None, - details: Some(Map::from_iter([ - ( - "path".to_string(), - Value::String("C:\\runtime\\plugin".to_string()), - ), - ("phase".to_string(), Value::String("cleanup".to_string())), - ])), - }, - ); - - assert!( - record - .copy_text - .contains(&" - path: C:\\runtime\\plugin".to_string()) - ); - assert!(record.copy_text.contains(&" - phase: cleanup".to_string())); - assert!(!record.copy_text.iter().any(|line| line == "{")); - } - - #[test] - fn test_silent_logger_buffers_diagnostics() { - clear_buffered_diagnostics(); - - let logger = create_logger("buffer-test", Some(LogLevel::Silent)); - assert!( - logger - .warn(LoggerDiagnosticInput { - code: "BUFFERED_WARN".to_string(), - title: "Buffered diagnostic".to_string(), - root_cause: vec!["Silent mode should still retain diagnostics.".to_string()], - exact_fix: None, - possible_fixes: None, - details: None, - }) - .is_none() - ); - - let drained = drain_buffered_diagnostics(); - assert_eq!(drained.len(), 1); - assert_eq!(drained[0].code, "BUFFERED_WARN"); - } - - #[derive(Default)] - struct FlushTrackingWriter { - writes: Vec, - flush_count: usize, - } - - impl Write for FlushTrackingWriter { - fn write(&mut self, buf: &[u8]) -> std::io::Result { - self.writes.extend_from_slice(buf); - Ok(buf.len()) - } - - fn flush(&mut self) -> std::io::Result<()> { - self.flush_count += 1; - Ok(()) - } - } - - #[test] - fn test_write_output_line_flushes_each_message() { - let mut writer = FlushTrackingWriter::default(); - - write_output_line(&mut writer, "### hello").unwrap(); - - assert_eq!(String::from_utf8(writer.writes).unwrap(), "### hello\n"); - assert_eq!(writer.flush_count, 1); - } -} diff --git a/sdk/src/infra/logger/core.rs b/sdk/src/infra/logger/core.rs new file mode 100644 index 00000000..47191a45 --- /dev/null +++ b/sdk/src/infra/logger/core.rs @@ -0,0 +1,263 @@ +use std::sync::atomic::{AtomicU8, Ordering}; +use std::time::{Duration, Instant}; + +use serde::Serialize; +use serde_json::Value; + +use super::diagnostic::{DiagnosticInput, invalid_record, record_from_input, validate_diagnostic_input}; +use super::sink::buffer_diagnostic; + +// --------------------------------------------------------------------------- +// Log levels +// --------------------------------------------------------------------------- + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize)] +#[serde(rename_all = "lowercase")] +pub enum LogLevel { + Silent, + Fatal, + Error, + Warn, + Info, + Debug, + Trace, +} + +impl LogLevel { + pub fn priority(self) -> u8 { + match self { + Self::Silent => 0, + Self::Fatal => 1, + Self::Error => 2, + Self::Warn => 3, + Self::Info => 4, + Self::Debug => 5, + Self::Trace => 6, + } + } + + pub fn as_str(self) -> &'static str { + match self { + Self::Silent => "silent", + Self::Fatal => "fatal", + Self::Error => "error", + Self::Warn => "warn", + Self::Info => "info", + Self::Debug => "debug", + Self::Trace => "trace", + } + } + + pub fn from_str_loose(s: &str) -> Option { + match s.to_ascii_lowercase().as_str() { + "silent" => Some(Self::Silent), + "fatal" => Some(Self::Fatal), + "error" => Some(Self::Error), + "warn" => Some(Self::Warn), + "info" => Some(Self::Info), + "debug" => Some(Self::Debug), + "trace" => Some(Self::Trace), + _ => None, + } + } +} + +// --------------------------------------------------------------------------- +// Span +// --------------------------------------------------------------------------- + +/// An operation span that tracks timing and nesting. +#[derive(Debug, Clone)] +pub struct Span { + pub name: String, + pub namespace: String, + pub start: Instant, +} + +impl Span { + pub fn new(name: &str, namespace: &str) -> Self { + Self { + name: name.to_string(), + namespace: namespace.to_string(), + start: Instant::now(), + } + } + + pub fn enter(&self) -> SpanGuard { + SpanGuard::new(self.clone()) + } + + pub fn duration(&self) -> Duration { + self.start.elapsed() + } +} + +/// RAII guard that emits span exit event on drop. +pub struct SpanGuard { + span: Span, + exited: bool, +} + +impl SpanGuard { + fn new(span: Span) -> Self { + // Emit span enter event immediately + crate::infra::logger::sink::write_span_enter(&span); + Self { span, exited: false } + } + + pub fn exit(mut self) { + self.do_exit(); + } + + fn do_exit(&mut self) { + if self.exited { + return; + } + self.exited = true; + crate::infra::logger::sink::write_span_exit(&self.span); + } +} + +impl Drop for SpanGuard { + fn drop(&mut self) { + self.do_exit(); + } +} + +// --------------------------------------------------------------------------- +// Event +// --------------------------------------------------------------------------- + +#[derive(Debug, Clone)] +pub struct Event { + pub level: LogLevel, + pub namespace: String, + pub message: Value, + pub meta: Option, + pub span_name: Option, +} + +// --------------------------------------------------------------------------- +// Logger +// --------------------------------------------------------------------------- + +/// A namespaced logger with configurable level. +pub struct Logger { + pub namespace: String, + pub level: LogLevel, +} + +impl Logger { + pub fn new(namespace: &str, level: LogLevel) -> Self { + Self { + namespace: namespace.to_string(), + level, + } + } + + pub fn info(&self, message: impl Into, meta: Option) { + self.log_message(LogLevel::Info, message.into(), meta); + } + + pub fn debug(&self, message: impl Into, meta: Option) { + self.log_message(LogLevel::Debug, message.into(), meta); + } + + pub fn trace(&self, message: impl Into, meta: Option) { + self.log_message(LogLevel::Trace, message.into(), meta); + } + + pub fn warn(&self, diagnostic: DiagnosticInput) { + self.log_diagnostic(LogLevel::Warn, diagnostic); + } + + pub fn error(&self, diagnostic: DiagnosticInput) { + self.log_diagnostic(LogLevel::Error, diagnostic); + } + + pub fn fatal(&self, diagnostic: DiagnosticInput) { + self.log_diagnostic(LogLevel::Fatal, diagnostic); + } + + pub fn span(&self, name: &str) -> Span { + Span::new(name, &self.namespace) + } + + fn should_emit(&self, level: LogLevel) -> bool { + level.priority() <= self.level.priority() + } + + fn log_message(&self, level: LogLevel, message: Value, meta: Option) { + if !self.should_emit(level) { + return; + } + let event = Event { + level, + namespace: self.namespace.clone(), + message, + meta, + span_name: None, + }; + crate::infra::logger::sink::write_event(&event); + } + + fn log_diagnostic(&self, level: LogLevel, diagnostic: DiagnosticInput) { + let record = match validate_diagnostic_input(&diagnostic) { + Ok(()) => record_from_input(&self.namespace, level.as_str(), diagnostic), + Err(errors) => { + invalid_record(&self.namespace, level.as_str(), serde_json::to_value(&diagnostic).unwrap_or_default(), &errors) + } + }; + + // Buffer diagnostics even if level is Silent + buffer_diagnostic(&record); + + if !self.should_emit(level) { + return; + } + + let event = Event { + level, + namespace: self.namespace.clone(), + message: serde_json::to_value(&record).unwrap_or_default(), + meta: None, + span_name: None, + }; + crate::infra::logger::sink::write_event(&event); + } +} + +// --------------------------------------------------------------------------- +// Global state +// --------------------------------------------------------------------------- + +static GLOBAL_LEVEL: AtomicU8 = AtomicU8::new(4); // Info default + +pub fn set_global_level(level: LogLevel) { + GLOBAL_LEVEL.store(level.priority(), Ordering::Relaxed); +} + +pub fn get_global_level() -> LogLevel { + match GLOBAL_LEVEL.load(Ordering::Relaxed) { + 0 => LogLevel::Silent, + 1 => LogLevel::Fatal, + 2 => LogLevel::Error, + 3 => LogLevel::Warn, + 4 => LogLevel::Info, + 5 => LogLevel::Debug, + 6 => LogLevel::Trace, + _ => LogLevel::Info, + } +} + +pub fn resolve_level(explicit: Option) -> LogLevel { + if let Some(l) = explicit { + return l; + } + if let Ok(env_val) = std::env::var("LOG_LEVEL") + && let Some(l) = LogLevel::from_str_loose(&env_val) + { + return l; + } + get_global_level() +} diff --git a/sdk/src/infra/logger/diagnostic.rs b/sdk/src/infra/logger/diagnostic.rs new file mode 100644 index 00000000..22223c71 --- /dev/null +++ b/sdk/src/infra/logger/diagnostic.rs @@ -0,0 +1,217 @@ +//! Structured diagnostic types for error/warning/fatal logging. + +use serde::{Deserialize, Serialize}; +use serde_json::{Map, Value}; + +/// Input schema for a structured diagnostic log entry. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct DiagnosticInput { + pub code: String, + pub title: String, + pub root_cause: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + pub exact_fix: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub possible_fixes: Option>>, + #[serde(skip_serializing_if = "Option::is_none")] + pub details: Option>, +} + +/// Full diagnostic record including runtime metadata. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct DiagnosticRecord { + pub code: String, + pub title: String, + pub root_cause: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + pub exact_fix: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub possible_fixes: Option>>, + #[serde(skip_serializing_if = "Option::is_none")] + pub details: Option>, + pub level: String, + pub namespace: String, + pub copy_text: Vec, +} + +/// Validate a diagnostic input for required fields. +pub fn validate_diagnostic_input(input: &DiagnosticInput) -> Result<(), Vec> { + let mut errors: Vec = Vec::new(); + + if input.code.trim().is_empty() { + errors.push("code must be a non-empty string".to_string()); + } + if input.title.trim().is_empty() { + errors.push("title must be a non-empty string".to_string()); + } + if input.root_cause.is_empty() { + errors.push("rootCause must contain at least one line".to_string()); + } + + if let Some(lines) = &input.exact_fix && lines.is_empty() { + errors.push("exactFix must contain at least one line when provided".to_string()); + } + + if let Some(fixes) = &input.possible_fixes { + if fixes.is_empty() { + errors.push("possibleFixes must contain at least one fix when provided".to_string()); + } + for (index, lines) in fixes.iter().enumerate() { + if lines.is_empty() { + errors.push(format!("possibleFixes[{index}] must contain at least one line")); + } + } + } + + if errors.is_empty() { + Ok(()) + } else { + Err(errors) + } +} + +/// Build copy-friendly text from a diagnostic record. +pub fn build_copy_text(record: &DiagnosticRecord) -> Vec { + let mut lines = vec![record.title.clone()]; + + append_section(&mut lines, "**What happened**", &record.root_cause, None); + + if let Some(exact_fix) = &record.exact_fix { + append_section(&mut lines, "**Do this**", exact_fix, None); + } + + if let Some(possible_fixes) = &record.possible_fixes + && !possible_fixes.is_empty() + { + if !lines.is_empty() { + lines.push(String::new()); + } + lines.push("**Try this if needed**".to_string()); + for (index, fix) in possible_fixes.iter().enumerate() { + let mut iter = fix.iter(); + if let Some(first) = iter.next() { + lines.push(format!(" {}. {}", index + 1, first)); + } + for entry in iter { + lines.push(format!(" {entry}")); + } + } + } + + if let Some(details) = &record.details + && !details.is_empty() + { + if !lines.is_empty() { + lines.push(String::new()); + } + lines.push("**Context**".to_string()); + let mut detail_lines = value_to_markdown_lines(&Value::Object(details.clone())); + for line in &mut detail_lines { + line.insert_str(0, " "); + } + lines.extend(detail_lines); + } + + lines +} + +fn append_section( + lines: &mut Vec, + title: &str, + entries: &[String], + numbered: Option, +) { + if entries.is_empty() { + return; + } + + if !lines.is_empty() { + lines.push(String::new()); + } + + if !title.is_empty() { + lines.push(title.to_string()); + } + + match numbered { + Some(number) => { + let mut iter = entries.iter(); + if let Some(first) = iter.next() { + lines.push(format!(" {number}. {first}")); + } + for entry in iter { + lines.push(format!(" {entry}")); + } + } + None => { + for entry in entries { + lines.push(format!(" - {entry}")); + } + } + } +} + +use super::formatter::value_to_markdown_lines; + +/// Build a diagnostic record from validated input. +pub fn record_from_input( + namespace: &str, + level: &str, + input: DiagnosticInput, +) -> DiagnosticRecord { + let mut record = DiagnosticRecord { + code: input.code.trim().to_string(), + title: input.title.trim().to_string(), + root_cause: input.root_cause, + exact_fix: input.exact_fix, + possible_fixes: input.possible_fixes, + details: input.details, + level: level.to_string(), + namespace: namespace.to_string(), + copy_text: Vec::new(), + }; + record.copy_text = build_copy_text(&record); + record +} + +/// Build a fallback diagnostic record for invalid input. +pub fn invalid_record( + namespace: &str, + level: &str, + raw_payload: Value, + validation_errors: &[String], +) -> DiagnosticRecord { + let mut details = Map::new(); + details.insert("rawPayload".to_string(), raw_payload); + details.insert( + "validationErrors".to_string(), + Value::Array( + validation_errors + .iter() + .map(|e| Value::String(e.clone())) + .collect(), + ), + ); + + let mut record = DiagnosticRecord { + code: "LOGGER_DIAGNOSTIC_SCHEMA_INVALID".to_string(), + title: "Logger diagnostic payload is invalid".to_string(), + root_cause: vec![ + "The logger received a warn/error/fatal payload that does not match the required diagnostic schema.".to_string(), + format!("Validation issues: {}", validation_errors.join("; ")), + ], + exact_fix: Some(vec![ + "Pass a diagnostic object with non-empty code, title, and rootCause fields.".to_string(), + "Keep exactFix and each possibleFixes entry as non-empty string arrays when they are present.".to_string(), + ]), + possible_fixes: None, + details: Some(details), + level: level.to_string(), + namespace: namespace.to_string(), + copy_text: Vec::new(), + }; + record.copy_text = build_copy_text(&record); + record +} diff --git a/sdk/src/infra/logger/formatter.rs b/sdk/src/infra/logger/formatter.rs new file mode 100644 index 00000000..768ba641 --- /dev/null +++ b/sdk/src/infra/logger/formatter.rs @@ -0,0 +1,228 @@ +use serde_json::Value; + +use super::core::{Event, LogLevel, Span}; + +/// Format an event as Markdown. +pub fn format_event(event: &Event) -> String { + match event.level { + LogLevel::Warn | LogLevel::Error | LogLevel::Fatal => { + format_diagnostic_event(event) + } + _ => { + format_message_event(event) + } + } +} + +/// Format a span enter event. +pub fn format_span_enter(span: &Span) -> String { + format!("### {} started", span.name) +} + +/// Format a span exit event with duration. +pub fn format_span_exit(span: &Span) -> String { + let duration_ms = span.duration().as_millis(); + format!("### {} completed\n - duration: {}ms", span.name, duration_ms) +} + +fn format_message_event(event: &Event) -> String { + let (title, meta_lines) = extract_message_and_meta(&event.message, event.meta.as_ref()); + let mut lines = Vec::new(); + + if let Some(title) = title { + if title.contains('\n') { + let parts: Vec<&str> = title.splitn(2, '\n').collect(); + lines.push(format!("### {}", parts[0].trim())); + lines.push(String::new()); + lines.push(parts[1].trim().to_string()); + } else { + lines.push(format!("### {}", title)); + } + } else { + lines.push("### Details".to_string()); + } + + if !meta_lines.is_empty() { + lines.push(String::new()); + lines.extend(meta_lines); + } + + lines.join("\n") +} + +fn format_diagnostic_event(event: &Event) -> String { + // For diagnostic events, the message contains the serialized DiagnosticRecord + let record: super::diagnostic::DiagnosticRecord = match serde_json::from_value(event.message.clone()) { + Ok(r) => r, + Err(_) => return "### Diagnostic error\n - failed to parse diagnostic record".to_string(), + }; + + let mut lines = vec![format!("### {}", record.title)]; + + if !record.root_cause.is_empty() { + lines.push(String::new()); + lines.push("**What happened**".to_string()); + for cause in &record.root_cause { + lines.push(format!(" - {cause}")); + } + } + + if let Some(exact_fix) = &record.exact_fix + && !exact_fix.is_empty() + { + lines.push(String::new()); + lines.push("**Do this**".to_string()); + for fix in exact_fix { + lines.push(format!(" - {fix}")); + } + } + + if let Some(possible_fixes) = &record.possible_fixes + && !possible_fixes.is_empty() + { + lines.push(String::new()); + lines.push("**Try this if needed**".to_string()); + for (index, fix) in possible_fixes.iter().enumerate() { + let mut iter = fix.iter(); + if let Some(first) = iter.next() { + lines.push(format!(" {}. {}", index + 1, first)); + } + for entry in iter { + lines.push(format!(" {entry}")); + } + } + } + + if let Some(details) = &record.details + && !details.is_empty() + { + lines.push(String::new()); + lines.push("**Context**".to_string()); + let mut detail_lines = value_to_markdown_lines(&Value::Object(details.clone())); + for line in &mut detail_lines { + line.insert_str(0, " "); + } + lines.extend(detail_lines); + } + + lines.join("\n") +} + +fn extract_message_and_meta(message: &Value, meta: Option<&Value>) -> (Option, Vec) { + let (msg, mut lines) = match message { + Value::String(s) => (Some(s.clone()), Vec::new()), + Value::Object(map) => { + if let Some(Value::String(msg)) = map.get("message") { + let mut remainder = map.clone(); + remainder.remove("message"); + let lines = if remainder.is_empty() { + Vec::new() + } else { + value_to_markdown_lines(&Value::Object(remainder)) + }; + (Some(msg.clone()), lines) + } else if map.len() == 1 { + let (key, val) = map.iter().next().unwrap(); + match val { + Value::Null | Value::Bool(_) | Value::Number(_) | Value::String(_) => { + (Some(format!("{key}: {}", scalar_to_text(val))), Vec::new()) + } + Value::Array(items) if !items.is_empty() => { + (Some(key.clone()), value_to_markdown_lines(val)) + } + Value::Object(obj) if !obj.is_empty() => { + (Some(key.clone()), value_to_markdown_lines(val)) + } + _ => (None, value_to_markdown_lines(message)), + } + } else { + (None, value_to_markdown_lines(message)) + } + } + _ => (None, value_to_markdown_lines(message)), + }; + + // Merge external meta if provided + if let Some(meta_val) = meta + && !meta_val.is_null() + { + let meta_lines = value_to_markdown_lines(meta_val); + lines.extend(meta_lines); + } + + (msg, lines) +} + +pub(crate) fn value_to_markdown_lines(value: &Value) -> Vec { + let mut lines = Vec::new(); + append_markdown_value(&mut lines, None, value, 0); + lines +} + +pub(crate) fn append_markdown_value(lines: &mut Vec, label: Option<&str>, value: &Value, depth: usize) { + let prefix = " ".repeat(depth); + let bullet = format!("{prefix}- "); + + match value { + Value::Null | Value::Bool(_) | Value::Number(_) | Value::String(_) => match label { + Some(name) => { + lines.push(format!("{bullet}{name}: {}", scalar_to_text(value))); + } + None => { + lines.push(format!("{bullet}{}", scalar_to_text(value))); + } + }, + Value::Array(items) => { + if items.is_empty() { + match label { + Some(name) => lines.push(format!("{bullet}{name}: []")), + None => lines.push(format!("{bullet}[]")), + } + return; + } + + if let Some(name) = label { + lines.push(format!("{bullet}{name}:")); + for item in items { + append_markdown_value(lines, None, item, depth + 1); + } + return; + } + + for item in items { + append_markdown_value(lines, None, item, depth); + } + } + Value::Object(map) => { + if map.is_empty() { + match label { + Some(name) => lines.push(format!("{bullet}{name}: {{}}")), + None => lines.push(format!("{bullet}{{}}")), + } + return; + } + + if let Some(name) = label { + lines.push(format!("{bullet}{name}:")); + for (key, nested) in map { + append_markdown_value(lines, Some(key), nested, depth + 1); + } + return; + } + + for (key, nested) in map { + append_markdown_value(lines, Some(key), nested, depth); + } + } + } +} + +pub(crate) fn scalar_to_text(value: &Value) -> String { + match value { + Value::Null => "null".to_string(), + Value::Bool(b) => b.to_string(), + Value::Number(n) => n.to_string(), + Value::String(s) => s.clone(), + Value::Array(_) | Value::Object(_) => serde_json::to_string(value).unwrap_or_default(), + } +} diff --git a/sdk/src/infra/logger/mod.rs b/sdk/src/infra/logger/mod.rs new file mode 100644 index 00000000..39009be4 --- /dev/null +++ b/sdk/src/infra/logger/mod.rs @@ -0,0 +1,305 @@ +#![deny(clippy::all)] + +//! Structured Markdown logger with span tracking for observability. +//! +//! Output format: Markdown only. No JSON, no ANSI colors. +//! Destination: stdout for info/debug/trace, stderr for warn/error/fatal. + +pub mod core; +pub mod diagnostic; +pub mod formatter; +pub mod sink; + +pub use core::{LogLevel, Logger, Span, SpanGuard, get_global_level, resolve_level, set_global_level}; +pub use diagnostic::{DiagnosticInput, DiagnosticRecord, validate_diagnostic_input}; +pub use sink::{clear_diagnostics, drain_diagnostics, flush}; + +// Legacy re-exports for backward compatibility during migration +pub use diagnostic::DiagnosticInput as LoggerDiagnosticInput; +pub use diagnostic::DiagnosticRecord as LoggerDiagnosticRecord; + +/// Create a new logger with optional explicit level. +/// Falls back to global level or environment variable `LOG_LEVEL`. +pub fn create_logger(namespace: &str, explicit_level: Option) -> Logger { + let level = resolve_level(explicit_level); + Logger::new(namespace, level) +} + +// --------------------------------------------------------------------------- +// Convenience macros +// --------------------------------------------------------------------------- + +#[macro_export] +macro_rules! info { + ($logger:expr, $msg:expr) => { + $logger.info(serde_json::Value::String($msg.to_string()), None) + }; + ($logger:expr, $msg:expr, $meta:expr) => { + $logger.info(serde_json::Value::String($msg.to_string()), Some($meta)) + }; +} + +#[macro_export] +macro_rules! debug { + ($logger:expr, $msg:expr) => { + $logger.debug(serde_json::Value::String($msg.to_string()), None) + }; + ($logger:expr, $msg:expr, $meta:expr) => { + $logger.debug(serde_json::Value::String($msg.to_string()), Some($meta)) + }; +} + +#[macro_export] +macro_rules! trace { + ($logger:expr, $msg:expr) => { + $logger.trace(serde_json::Value::String($msg.to_string()), None) + }; + ($logger:expr, $msg:expr, $meta:expr) => { + $logger.trace(serde_json::Value::String($msg.to_string()), Some($meta)) + }; +} + +#[macro_export] +macro_rules! warn { + ($logger:expr, $diag:expr) => { + $logger.warn($diag) + }; +} + +#[macro_export] +macro_rules! error { + ($logger:expr, $diag:expr) => { + $logger.error($diag) + }; +} + +#[macro_export] +macro_rules! fatal { + ($logger:expr, $diag:expr) => { + $logger.fatal($diag) + }; +} + +#[macro_export] +macro_rules! span { + ($logger:expr, $name:expr) => { + $logger.span($name) + }; +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +#[cfg(test)] +mod tests { + use super::*; + use crate::infra::logger::core::Event; + use serde_json::Value; + + #[test] + fn test_log_level_priority_ordering() { + assert!(LogLevel::Silent.priority() < LogLevel::Fatal.priority()); + assert!(LogLevel::Fatal.priority() < LogLevel::Error.priority()); + assert!(LogLevel::Error.priority() < LogLevel::Warn.priority()); + assert!(LogLevel::Warn.priority() < LogLevel::Info.priority()); + assert!(LogLevel::Info.priority() < LogLevel::Debug.priority()); + assert!(LogLevel::Debug.priority() < LogLevel::Trace.priority()); + } + + #[test] + fn test_log_level_from_str_case_insensitive() { + assert_eq!(LogLevel::from_str_loose("info"), Some(LogLevel::Info)); + assert_eq!(LogLevel::from_str_loose("INFO"), Some(LogLevel::Info)); + assert_eq!(LogLevel::from_str_loose("Debug"), Some(LogLevel::Debug)); + assert_eq!(LogLevel::from_str_loose("unknown"), None); + } + + #[test] + fn test_create_logger_uses_global_level() { + set_global_level(LogLevel::Debug); + let logger = create_logger("test", None); + assert_eq!(logger.level, LogLevel::Debug); + set_global_level(LogLevel::Info); // reset + } + + #[test] + fn test_create_logger_uses_explicit_level() { + set_global_level(LogLevel::Info); + let logger = create_logger("test", Some(LogLevel::Warn)); + assert_eq!(logger.level, LogLevel::Warn); + } + + #[test] + fn test_logger_filters_by_level() { + let logger = Logger::new("test", LogLevel::Warn); + // These should not panic or emit; just verify they don't crash + logger.info("should be filtered", None); + logger.debug("should be filtered", None); + logger.trace("should be filtered", None); + // Warn, Error, Fatal should be emitted (but we can't easily capture in unit test) + } + + #[test] + fn test_span_creation() { + let span = Span::new("test-span", "test-ns"); + assert_eq!(span.name, "test-span"); + assert_eq!(span.namespace, "test-ns"); + } + + #[test] + fn test_span_tracks_duration() { + let span = Span::new("test", "ns"); + std::thread::sleep(std::time::Duration::from_millis(1)); + let duration = span.duration(); + assert!(duration > std::time::Duration::ZERO); + } + + #[test] + fn test_diagnostic_validation_rejects_empty_fields() { + let diag = DiagnosticInput { + code: "".to_string(), + title: "".to_string(), + root_cause: vec![], + exact_fix: None, + possible_fixes: None, + details: None, + }; + let result = validate_diagnostic_input(&diag); + assert!(result.is_err()); + let errors = result.unwrap_err(); + assert!(errors.iter().any(|e| e.contains("code"))); + assert!(errors.iter().any(|e| e.contains("title"))); + assert!(errors.iter().any(|e| e.contains("rootCause"))); + } + + #[test] + fn test_diagnostic_validation_accepts_valid_input() { + let diag = DiagnosticInput { + code: "TEST".to_string(), + title: "Test diagnostic".to_string(), + root_cause: vec!["Something went wrong".to_string()], + exact_fix: Some(vec!["Fix it".to_string()]), + possible_fixes: None, + details: None, + }; + assert!(validate_diagnostic_input(&diag).is_ok()); + } + + #[test] + fn test_diagnostic_buffering() { + clear_diagnostics(); + let record = DiagnosticRecord { + code: "BUF_TEST".to_string(), + title: "Buffered".to_string(), + root_cause: vec!["test".to_string()], + exact_fix: None, + possible_fixes: None, + details: None, + level: "warn".to_string(), + namespace: "test".to_string(), + copy_text: vec![], + }; + sink::buffer_diagnostic(&record); + let drained = drain_diagnostics(); + assert_eq!(drained.len(), 1); + assert_eq!(drained[0].code, "BUF_TEST"); + } + + #[test] + fn test_flush_completes_without_panic() { + // Just verify flush doesn't panic + flush(); + } + + #[test] + fn test_global_level_get_set() { + let original = get_global_level(); + set_global_level(LogLevel::Debug); + assert_eq!(get_global_level(), LogLevel::Debug); + set_global_level(LogLevel::Trace); + assert_eq!(get_global_level(), LogLevel::Trace); + set_global_level(original); // restore + } + + #[test] + fn test_resolve_level_explicit_wins() { + set_global_level(LogLevel::Info); + let level = resolve_level(Some(LogLevel::Error)); + assert_eq!(level, LogLevel::Error); + } + + #[test] + fn test_resolve_level_fallback_to_global() { + set_global_level(LogLevel::Warn); + unsafe { std::env::remove_var("LOG_LEVEL"); } + let level = resolve_level(None); + assert_eq!(level, LogLevel::Warn); + } + + #[test] + fn test_thread_safety() { + use std::sync::Arc; + use std::thread; + + let logger = Arc::new(Logger::new("thread-test", LogLevel::Trace)); + let mut handles = Vec::new(); + + for i in 0..10 { + let log = Arc::clone(&logger); + handles.push(thread::spawn(move || { + log.info(format!("thread-{i}"), None); + log.debug(format!("debug-{i}"), None); + let _span = log.span(format!("span-{i}").as_str()).enter(); + log.warn(DiagnosticInput { + code: format!("WARN-{i}"), + title: format!("Warning {i}"), + root_cause: vec!["test".to_string()], + exact_fix: None, + possible_fixes: None, + details: None, + }); + })); + } + + for h in handles { + h.join().expect("thread should not panic"); + } + + // Verify flush completes without deadlock + flush(); + } + + #[test] + fn test_sink_stderr_routing_for_errors() { + // Verify that error/fatal/warn events are routed to stderr + // by checking the internal use_stderr logic via a controlled event. + let warn_event = Event { + level: LogLevel::Warn, + namespace: "test".to_string(), + message: Value::String("warn msg".to_string()), + meta: None, + span_name: None, + }; + let error_event = Event { + level: LogLevel::Error, + namespace: "test".to_string(), + message: Value::String("error msg".to_string()), + meta: None, + span_name: None, + }; + let info_event = Event { + level: LogLevel::Info, + namespace: "test".to_string(), + message: Value::String("info msg".to_string()), + meta: None, + span_name: None, + }; + + // These should not panic; stderr routing is verified by the sink's use_stderr logic. + sink::write_event(&warn_event); + sink::write_event(&error_event); + sink::write_event(&info_event); + flush(); + } +} diff --git a/sdk/src/infra/logger/sink.rs b/sdk/src/infra/logger/sink.rs new file mode 100644 index 00000000..2aab9a9c --- /dev/null +++ b/sdk/src/infra/logger/sink.rs @@ -0,0 +1,133 @@ +use std::io::{self, Write}; +use std::sync::mpsc::{self, Sender, Receiver}; +use std::sync::{LazyLock, Mutex}; +use std::thread; + +use super::core::{Event, LogLevel, Span}; +use super::diagnostic::DiagnosticRecord; +use super::formatter; + +// --------------------------------------------------------------------------- +// Output command types +// --------------------------------------------------------------------------- + +enum OutputCommand { + Write { use_stderr: bool, output: String }, + Flush { ack: Sender<()> }, +} + +// --------------------------------------------------------------------------- +// Global state +// --------------------------------------------------------------------------- + +static OUTPUT_SINK: LazyLock> = LazyLock::new(spawn_output_sink); +static DIAGNOSTIC_BUFFER: LazyLock>> = + LazyLock::new(|| Mutex::new(Vec::new())); + +// --------------------------------------------------------------------------- +// Public API +// --------------------------------------------------------------------------- + +pub fn write_event(event: &Event) { + let use_stderr = matches!(event.level, LogLevel::Error | LogLevel::Fatal | LogLevel::Warn); + let output = formatter::format_event(event); + send_output(use_stderr, output); +} + +pub fn write_span_enter(span: &Span) { + let output = formatter::format_span_enter(span); + send_output(false, output); +} + +pub fn write_span_exit(span: &Span) { + let output = formatter::format_span_exit(span); + send_output(false, output); +} + +pub fn buffer_diagnostic(record: &DiagnosticRecord) { + if let Ok(mut buf) = DIAGNOSTIC_BUFFER.lock() { + buf.push(record.clone()); + } +} + +pub fn drain_diagnostics() -> Vec { + match DIAGNOSTIC_BUFFER.lock() { + Ok(mut buf) => std::mem::take(&mut *buf), + Err(_) => Vec::new(), + } +} + +pub fn clear_diagnostics() { + if let Ok(mut buf) = DIAGNOSTIC_BUFFER.lock() { + buf.clear(); + } +} + +pub fn flush() { + let (ack_tx, ack_rx) = mpsc::channel(); + if OUTPUT_SINK.send(OutputCommand::Flush { ack: ack_tx }).is_ok() { + let _ = ack_rx.recv(); + } +} + +// --------------------------------------------------------------------------- +// Internal helpers +// --------------------------------------------------------------------------- + +fn send_output(use_stderr: bool, output: String) { + if OUTPUT_SINK + .send(OutputCommand::Write { use_stderr, output: output.clone() }) + .is_err() + { + // Fallback: write directly if sink thread is dead + write_direct(use_stderr, &output); + } +} + +fn write_direct(use_stderr: bool, output: &str) { + if use_stderr { + let mut stderr = io::stderr().lock(); + let _ = writeln!(stderr, "{output}"); + let _ = stderr.flush(); + } else { + let mut stdout = io::stdout().lock(); + let _ = writeln!(stdout, "{output}"); + let _ = stdout.flush(); + } +} + +fn spawn_output_sink() -> Sender { + let (tx, rx) = mpsc::channel(); + thread::Builder::new() + .name("tnmsd-logger".to_string()) + .spawn(move || output_worker(rx)) + .expect("failed to spawn logger output worker"); + tx +} + +fn output_worker(receiver: Receiver) { + let stdout = io::stdout(); + let stderr = io::stderr(); + let mut stdout_writer = io::BufWriter::new(stdout); + let mut stderr_writer = io::BufWriter::new(stderr); + + while let Ok(command) = receiver.recv() { + match command { + OutputCommand::Write { use_stderr, output } => { + if use_stderr { + let _ = writeln!(stderr_writer, "{output}"); + } else { + let _ = writeln!(stdout_writer, "{output}"); + } + } + OutputCommand::Flush { ack } => { + let _ = stdout_writer.flush(); + let _ = stderr_writer.flush(); + let _ = ack.send(()); + } + } + } + + let _ = stdout_writer.flush(); + let _ = stderr_writer.flush(); +} diff --git a/sdk/src/infra/mod.rs b/sdk/src/infra/mod.rs index 5a1c988d..f96a629c 100644 --- a/sdk/src/infra/mod.rs +++ b/sdk/src/infra/mod.rs @@ -2,6 +2,7 @@ pub mod deno_runtime; pub mod desk_paths; pub mod diagnostic_helpers; pub mod file_ops; +pub mod git_fs; pub mod logger; pub mod md_compiler; pub mod path_types; @@ -12,4 +13,5 @@ pub use file_ops::{ write_file_sync, }; +pub use git_fs::{find_all_git_repos, resolve_git_info_dir}; pub use path_types::{FilePathKind, RelativePath, RootPath}; diff --git a/sdk/src/infra/script_runtime.rs b/sdk/src/infra/script_runtime.rs index ba52aaff..0d74ee15 100644 --- a/sdk/src/infra/script_runtime.rs +++ b/sdk/src/infra/script_runtime.rs @@ -408,7 +408,7 @@ mod tests { assert_eq!(proxied, "____.git/info/exclude"); let validated = validate_public_path_impl(&proxied, "/tmp/ws/aindex/public")?; - assert_eq!(validated, "____.git/info/exclude"); + assert_eq!(validated.replace('\\', "/"), "____.git/info/exclude"); Ok(()) } @@ -418,7 +418,7 @@ mod tests { assert_eq!(proxied, "____vscode/settings.json"); let validated = validate_public_path_impl(&proxied, "/tmp/ws/aindex/public")?; - assert_eq!(validated, "____vscode/settings.json"); + assert_eq!(validated.replace('\\', "/"), "____vscode/settings.json"); Ok(()) } @@ -438,7 +438,7 @@ mod tests { assert_eq!(proxied, "____idea/.gitignore"); let validated = validate_public_path_impl(&proxied, "/tmp/ws/aindex/public")?; - assert_eq!(validated, "____idea/.gitignore"); + assert_eq!(validated.replace('\\', "/"), "____idea/.gitignore"); Ok(()) } @@ -467,7 +467,7 @@ console.log(`generated/${ctx.logicalPath}`) serde_json::json!({ "scope": "skill" }), )?; - assert_eq!(resolved, "generated/daily/note.md"); + assert_eq!(resolved.replace('\\', "/"), "generated/daily/note.md"); Ok(()) } } diff --git a/sdk/src/lib.rs b/sdk/src/lib.rs index 0686b885..72e2ed10 100644 --- a/sdk/src/lib.rs +++ b/sdk/src/lib.rs @@ -18,7 +18,7 @@ pub use infra::md_compiler; pub use services::clean_service; pub use services::dry_run_service; pub use services::install_service; -pub use services::prompts::{ +pub use services::prompt_service::{ ListPromptsOptions, ManagedPromptKind, PromptArtifactRecord, PromptArtifactState, PromptCatalogItem, PromptCatalogPaths, PromptCatalogPresence, PromptDetails, PromptServiceOptions, PromptSourceLocale, UpsertPromptSourceInput, WritePromptArtifactsInput, diff --git a/sdk/src/policy/cleanup.rs b/sdk/src/policy/cleanup.rs index 2edb4d83..cc176b92 100644 --- a/sdk/src/policy/cleanup.rs +++ b/sdk/src/policy/cleanup.rs @@ -4,10 +4,15 @@ use std::fs; use std::path::{Component, Path, PathBuf}; use globset::{Glob, GlobBuilder, GlobSet, GlobSetBuilder}; -use serde::{Deserialize, Serialize}; use serde_json::json; use walkdir::WalkDir; +pub use crate::domain::cleanup::{ + CleanupDeclarationsDto, CleanupErrorDto, CleanupErrorKindDto, CleanupExecutionResultDto, + CleanupPlan, CleanupProtectionConflictDto, CleanupSnapshot, CleanupTargetDto, + CleanupTargetKindDto, PluginCleanupSnapshotDto, ProtectedPathViolationDto, ProtectedRuleDto, + ProtectionModeDto, ProtectionRuleMatcherDto, +}; use crate::domain::config; use crate::infra::desk_paths; use crate::infra::logger::create_logger; @@ -39,149 +44,6 @@ const EMPTY_DIRECTORY_SCAN_EXCLUDED_BASENAMES: [&str; 15] = [ "volumes", ]; -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum ProtectionModeDto { - Direct, - Recursive, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum ProtectionRuleMatcherDto { - Path, - Glob, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum CleanupTargetKindDto { - File, - Directory, - Glob, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum CleanupErrorKindDto { - File, - Directory, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CleanupTargetDto { - pub path: String, - pub kind: CleanupTargetKindDto, - #[serde(default)] - pub exclude_basenames: Vec, - pub protection_mode: Option, - pub scope: Option, - pub label: Option, -} - -#[derive(Debug, Clone, Default, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CleanupDeclarationsDto { - #[serde(default)] - pub delete: Vec, - #[serde(default)] - pub protect: Vec, - #[serde(default)] - pub exclude_scan_globs: Vec, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct PluginCleanupSnapshotDto { - pub plugin_name: String, - #[serde(default)] - pub outputs: Vec, - #[serde(default)] - pub cleanup: CleanupDeclarationsDto, -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct ProtectedRuleDto { - pub path: String, - pub protection_mode: ProtectionModeDto, - pub reason: String, - pub source: String, - pub matcher: Option, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CleanupSnapshot { - pub workspace_dir: String, - pub aindex_dir: Option, - #[serde(default)] - pub project_roots: Vec, - #[serde(default)] - pub protected_rules: Vec, - #[serde(default)] - pub plugin_snapshots: Vec, - /// Glob patterns from aindex.config.ts that should be excluded from - /// the empty-directory scanner (git-style ** patterns supported). - #[serde(default)] - pub empty_dir_exclude_globs: Vec, -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct ProtectedPathViolationDto { - pub target_path: String, - pub protected_path: String, - pub protection_mode: ProtectionModeDto, - pub reason: String, - pub source: String, -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CleanupProtectionConflictDto { - pub output_path: String, - pub output_plugin: String, - pub protected_path: String, - pub protection_mode: ProtectionModeDto, - pub protected_by: String, - pub reason: String, -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CleanupPlan { - pub files_to_delete: Vec, - pub dirs_to_delete: Vec, - pub empty_dirs_to_delete: Vec, - pub violations: Vec, - pub conflicts: Vec, - pub excluded_scan_globs: Vec, -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CleanupErrorDto { - pub path: String, - pub kind: CleanupErrorKindDto, - pub error: String, -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CleanupExecutionResultDto { - pub deleted_files: usize, - pub deleted_dirs: usize, - pub errors: Vec, - pub violations: Vec, - pub conflicts: Vec, - pub files_to_delete: Vec, - pub dirs_to_delete: Vec, - pub empty_dirs_to_delete: Vec, - pub excluded_scan_globs: Vec, -} - #[derive(Debug, Clone)] struct CompiledProtectedRule { path: String, @@ -539,7 +401,7 @@ impl BatchedGlobPlanner { .count(); let glob_pattern_count = self.normalized_patterns.len() - literal_pattern_count; - crate::log_debug!( + crate::debug!( logger, "cleanup native glob execute started", json!({ @@ -600,7 +462,7 @@ impl BatchedGlobPlanner { literal_match_count += 1; } - crate::log_debug!( + crate::debug!( logger, "cleanup native glob literal processing complete", json!({ @@ -686,7 +548,7 @@ impl BatchedGlobPlanner { } } - crate::log_debug!( + crate::debug!( logger, "cleanup native glob group walks complete", json!({ @@ -698,7 +560,7 @@ impl BatchedGlobPlanner { ); // Convert HashMaps to sorted Vecs and deduplicate - crate::log_debug!( + crate::debug!( logger, "cleanup native glob result compaction started", json!({}) @@ -723,7 +585,7 @@ impl BatchedGlobPlanner { .collect(); delete_vec.sort_by_key(|(idx, _)| *idx); - crate::log_debug!( + crate::debug!( logger, "cleanup native glob result compaction complete", json!({ @@ -1486,7 +1348,7 @@ fn default_protection_mode_for_target(target: &CleanupTargetDto) -> ProtectionMo pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { let logger = create_logger("CleanupNative", None); - crate::log_trace!( + crate::trace!( logger, "cleanup native plan started", json!({ @@ -1578,7 +1440,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { } } - crate::log_trace!( + crate::trace!( logger, "cleanup native plan inventory collected", json!({ @@ -1615,7 +1477,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { } // Execute the batched glob expansion - crate::log_trace!( + crate::trace!( logger, "cleanup native glob expansion started", json!({ @@ -1633,7 +1495,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { .iter() .map(|(_, paths)| paths.len()) .sum::(); - crate::log_trace!( + crate::trace!( logger, "cleanup native glob expansion complete", json!({ @@ -1673,7 +1535,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { let guard = create_guard(&snapshot, &protected_rules)?; let conflicts = detect_cleanup_protection_conflicts(&output_path_owners, &guard); if !conflicts.is_empty() { - crate::log_trace!( + crate::trace!( logger, "cleanup native plan blocked", json!({ @@ -1693,7 +1555,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { let file_candidates = delete_files.into_iter().collect::>(); let dir_candidates = delete_dirs.into_iter().collect::>(); - crate::log_trace!( + crate::trace!( logger, "cleanup native file partition started", json!({ @@ -1703,7 +1565,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { ); let file_partition = partition_deletion_targets(&file_candidates, &guard, Some(&exact_safe_file_paths)); - crate::log_trace!( + crate::trace!( logger, "cleanup native file partition complete", json!({ @@ -1712,7 +1574,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { "violationCount": file_partition.violations.len(), }) ); - crate::log_trace!( + crate::trace!( logger, "cleanup native directory partition started", json!({ @@ -1721,7 +1583,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { }) ); let dir_partition = partition_deletion_targets(&dir_candidates, &guard, None); - crate::log_trace!( + crate::trace!( logger, "cleanup native directory partition complete", json!({ @@ -1730,14 +1592,14 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { "violationCount": dir_partition.violations.len(), }) ); - crate::log_trace!( + crate::trace!( logger, "cleanup native target compaction started", json!({}) ); let (files_to_delete, dirs_to_delete) = compact_deletion_targets(&file_partition.safe_paths, &dir_partition.safe_paths); - crate::log_trace!( + crate::trace!( logger, "cleanup native target compaction complete", json!({ @@ -1745,7 +1607,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { "compactedDirs": dirs_to_delete.len(), }) ); - crate::log_trace!( + crate::trace!( logger, "cleanup native target partition complete", json!({ @@ -1778,7 +1640,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { .map(|pattern| normalize_relative_glob_pattern(pattern)) .collect::>(), )?; - crate::log_trace!( + crate::trace!( logger, "cleanup native empty directory planning started", json!({ @@ -1806,7 +1668,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { .into_iter() .filter(|violation| !target_matches_project_root(&violation.target_path, &project_root_keys)) .collect::>(); - crate::log_trace!( + crate::trace!( logger, "cleanup native empty directory planning complete", json!({ @@ -1820,7 +1682,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { violations.extend(empty_dir_violations); violations.sort_by(|a, b| a.target_path.cmp(&b.target_path)); - crate::log_debug!( + crate::debug!( logger, "cleanup native plan complete", json!({ @@ -1844,10 +1706,10 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { pub fn perform_cleanup(snapshot: CleanupSnapshot) -> Result { let logger = create_logger("CleanupNative", None); - crate::log_trace!(logger, "cleanup native perform started", json!({})); + crate::trace!(logger, "cleanup native perform started", json!({})); let plan = plan_cleanup(snapshot)?; if !plan.conflicts.is_empty() || !plan.violations.is_empty() { - crate::log_trace!( + crate::trace!( logger, "cleanup native perform blocked", json!({ @@ -1868,7 +1730,7 @@ pub fn perform_cleanup(snapshot: CleanupSnapshot) -> Result Result Result Result Result Result Result Option { - let dot_git = project_dir.join(".git"); - if !dot_git.exists() { - return None; - } - - let metadata = fs::symlink_metadata(&dot_git).ok()?; - if metadata.is_dir() { - return Some(dot_git.join("info")); - } - - if metadata.is_file() { - let content = fs::read_to_string(&dot_git).ok()?; - for line in content.lines() { - let line = line.trim(); - if let Some(gitdir) = line.strip_prefix("gitdir:") { - let gitdir = Path::new(gitdir.trim()); - let resolved = if gitdir.is_absolute() { - gitdir.to_path_buf() - } else { - project_dir.join(gitdir) - }; - return Some(resolved.join("info")); - } - } - } - - None -} - -const SKIP_DIRS: &[&str] = &["node_modules", ".turbo", "dist", "build", "out", ".cache"]; - -/// Recursively discovers all `.git` entries (directories or files) under a given root, -/// skipping common non-source directories. Returns absolute paths of directories -/// containing a `.git` entry. The `root_dir` itself is excluded from results. -pub fn find_all_git_repos(root_dir: &Path, max_depth: usize) -> Vec { - let mut results = Vec::new(); - - fn walk(dir: &Path, root_dir: &Path, depth: usize, max_depth: usize, results: &mut Vec) { - if depth > max_depth { - return; - } - - let entries = match fs::read_dir(dir) { - Ok(e) => e, - Err(_) => return, - }; - - let mut has_git = false; - let mut subdirs = Vec::new(); - - for entry in entries.flatten() { - let name = entry.file_name(); - let name_str = name.to_string_lossy(); - if name_str == ".git" { - has_git = true; - continue; - } - if let Ok(ft) = entry.file_type() - && ft.is_dir() - && !SKIP_DIRS.contains(&name_str.as_ref()) - { - subdirs.push(entry.path()); - } - } - - if has_git && dir != root_dir { - results.push(dir.to_path_buf()); - } - - for subdir in subdirs { - walk(&subdir, root_dir, depth + 1, max_depth, results); - } - } - - walk(root_dir, root_dir, 0, max_depth, &mut results); - results -} - /// Scans `.git/modules/` directory recursively to find all submodule `info/` dirs. /// Handles nested submodules (modules within modules). Returns absolute paths of /// `info/` directories. @@ -169,110 +84,6 @@ mod tests { use std::fs; use tempfile::TempDir; - #[test] - fn test_resolve_git_info_dir_for_regular_repo() { - let tmp = TempDir::new().unwrap(); - let dot_git = tmp.path().join(".git"); - fs::create_dir_all(&dot_git).unwrap(); - - let result = resolve_git_info_dir(tmp.path()); - assert_eq!(result, Some(dot_git.join("info"))); - } - - #[test] - fn test_resolve_git_info_dir_for_gitlink() { - let tmp = TempDir::new().unwrap(); - let dot_git = tmp.path().join(".git"); - fs::write(&dot_git, "gitdir: /absolute/path/to/git\n").unwrap(); - - let result = resolve_git_info_dir(tmp.path()); - assert_eq!(result, Some(PathBuf::from("/absolute/path/to/git/info"))); - } - - #[test] - fn test_resolve_git_info_dir_for_relative_gitlink() { - let tmp = TempDir::new().unwrap(); - let dot_git = tmp.path().join(".git"); - fs::write(&dot_git, "gitdir: ../.git/modules/foo\n").unwrap(); - - let result = resolve_git_info_dir(tmp.path()); - assert_eq!( - result, - Some( - tmp - .path() - .join("..") - .join(".git") - .join("modules") - .join("foo") - .join("info") - .canonicalize() - .unwrap_or_else(|_| tmp - .path() - .join("..") - .join(".git") - .join("modules") - .join("foo") - .join("info")) - ) - ); - } - - #[test] - fn test_resolve_git_info_dir_missing() { - let tmp = TempDir::new().unwrap(); - assert_eq!(resolve_git_info_dir(tmp.path()), None); - } - - #[test] - fn test_find_all_git_repos_finds_nested() { - let tmp = TempDir::new().unwrap(); - let root = tmp.path(); - let child = root.join("packages").join("app"); - fs::create_dir_all(root.join(".git")).unwrap(); - fs::create_dir_all(child.join(".git")).unwrap(); - - let result = find_all_git_repos(root, 5); - assert_eq!(result.len(), 1); - assert_eq!(result[0], child); - } - - #[test] - fn test_find_all_git_repos_excludes_root() { - let tmp = TempDir::new().unwrap(); - let root = tmp.path(); - fs::create_dir_all(root.join(".git")).unwrap(); - - let result = find_all_git_repos(root, 5); - assert!(result.is_empty()); - } - - #[test] - fn test_find_all_git_repos_skips_skip_dirs() { - let tmp = TempDir::new().unwrap(); - let root = tmp.path(); - let node_modules = root.join("node_modules").join("some-lib"); - fs::create_dir_all(node_modules.join(".git")).unwrap(); - - let result = find_all_git_repos(root, 5); - assert!(result.is_empty()); - } - - #[test] - fn test_find_all_git_repos_respects_max_depth() { - let tmp = TempDir::new().unwrap(); - let root = tmp.path(); - let deep = root.join("a").join("b").join("c").join("d"); - fs::create_dir_all(deep.join(".git")).unwrap(); - - let result = find_all_git_repos(root, 3); - assert!(result.is_empty()); - - let result = find_all_git_repos(root, 4); - assert_eq!(result.len(), 1); - assert_eq!(result[0], deep); - } - #[test] fn test_find_git_module_info_dirs_finds_submodules() { let tmp = TempDir::new().unwrap(); diff --git a/sdk/src/policy/path_blocking.rs b/sdk/src/policy/path_blocking.rs index cd6a2ae1..8036427b 100644 --- a/sdk/src/policy/path_blocking.rs +++ b/sdk/src/policy/path_blocking.rs @@ -37,7 +37,12 @@ pub fn find_blocking_non_directory_path(expected_dir_path: &str) -> Option Result { + let logger = create_logger("clean", options.log_level.as_deref().and_then(|s| crate::infra::logger::LogLevel::from_str_loose(s))); + let _span = logger.span("command.clean").enter(); + + logger.info("Clean started", None); + let cwd = resolve_cwd(options.cwd.as_deref())?; + + let config_span = logger.span("config.load").enter(); let config_result = load_config(&cwd, options.load_user_config)?; + config_span.exit(); + let workspace_dir = resolve_workspace_dir(&cwd, &config_result.config)?; let workspace_warning = build_workspace_mismatch_warning(&cwd, &workspace_dir, &config_result); let workspace_dir_str = workspace_dir.to_string_lossy().into_owned(); - let global_scope = build_global_scope(&config_result.config); - let enabled_plugins = EnabledPlugins::from_config(config_result.config.plugins.as_ref()); + logger.info("Config loaded", Some(json!({ + "workspaceDir": &workspace_dir_str, + }))); + + let global_scope = crate::services::common::build_global_scope(&config_result.config); + let enabled_plugins = EnabledPlugins::from_config(config_result.config.plugins.as_ref(), DefaultPluginKind::Clean); + + let context_span = logger.span("context.collect").enter(); + let context = collect_context(&workspace_dir_str, global_scope.as_ref(), &enabled_plugins, &logger)?; + context_span.exit(); - let context = collect_context(&workspace_dir_str, global_scope.as_ref())?; - let (output_map, cleanup_map) = build_output_map(&context, enabled_plugins)?; + logger.info("Context collected", Some(json!({ + "globalMemory": context.global_memory.is_some(), + }))); + + let discover_span = logger.span("cleanup.discover").enter(); + let (output_map, cleanup_map) = build_output_map(&context, enabled_plugins, &logger)?; let mut snapshot = build_cleanup_snapshot(&workspace_dir_str, &output_map, &cleanup_map)?; + discover_span.exit(); + + logger.info("Cleanup targets discovered", Some(json!({ + "pluginCount": snapshot.plugin_snapshots.len(), + "projectRoots": snapshot.project_roots.len(), + }))); - // 根据 cwd 限制清理作用域:当 cwd 位于 workspace 的某个子项目下时, - // 只清理该项目的文件,不触碰其他项目。 + // 根据 cwd 限制清理作用域 if let Some(scope) = resolve_project_scope(&cwd, &workspace_dir) { + logger.info("Project scope resolved", Some(json!({ + "scope": scope.to_string_lossy().to_string(), + }))); snapshot = filter_snapshot_by_scope(snapshot, &scope, &workspace_dir); } if options.dry_run.unwrap_or(false) { + let plan_span = logger.span("cleanup.plan").enter(); let plan = crate::policy::cleanup::plan_cleanup(snapshot.clone()) .map_err(|e| CliError::ExecutionError(e))?; + plan_span.exit(); + let mut warnings = workspace_warning.into_iter().collect::>(); warnings.extend(plan.violations.iter().map(|v| { json!({ @@ -44,6 +79,15 @@ pub fn clean(options: MemorySyncCommandOptions) -> Result Result>(); @@ -104,6 +151,15 @@ pub fn clean(options: MemorySyncCommandOptions) -> Result Result) -> Result { - match cwd { - Some(value) => Ok(config::resolve_workspace_dir(value)), - None => std::env::current_dir().map_err(CliError::IoError), - } -} +// --------------------------------------------------------------------------- +// Scope resolution +// --------------------------------------------------------------------------- -fn load_config( - cwd: &Path, - load_user_config: Option, -) -> Result { - if load_user_config == Some(false) { - return Ok(config::MergedConfigResult { - config: UserConfigFile::default(), - sources: vec![], - found: false, - }); - } - - let result = ConfigLoader::with_defaults() - .try_load(cwd) - .map_err(CliError::ConfigError)?; - - if !result.found { - let config_path = config::get_required_global_config_path() - .unwrap_or_else(|_| config::get_global_config_path()); - return Err(CliError::ConfigError(format!( - "Required config file not found at {}. Please create it before running tnmsc.", - config_path.display() - ))); - } +fn resolve_project_scope(cwd: &Path, workspace_dir: &Path) -> Option { + let cwd_norm = strip_unc_prefix(cwd); + let ws_norm = strip_unc_prefix(workspace_dir); - Ok(result) -} + let relative = cwd_norm.strip_prefix(&ws_norm).ok()?; -fn resolve_workspace_dir(_cwd: &Path, config: &UserConfigFile) -> Result { - match config.workspace_dir.as_deref() { - Some(dir) => Ok(config::resolve_workspace_dir(dir)), - None => Err(CliError::ConfigError( - "workspaceDir is required but was not configured. Please set workspaceDir in your .tnmsc.json config file.".to_string(), - )), + if relative.as_os_str().is_empty() { + return None; } + + let first_component = relative.components().next()?; + Some(ws_norm.join(first_component.as_os_str())) } -/// 检查路径是否在某个目录下(包含目录本身)。 fn is_path_under_directory(path: &str, directory: &Path) -> bool { let path_buf = Path::new(path); let path_normalized = if path_buf.is_absolute() { @@ -188,44 +217,6 @@ fn is_path_under_directory(path: &str, directory: &Path) -> bool { path_str == dir_str || path_str.starts_with(&format!("{}/", dir_str)) } -/// 根据 cwd 和 workspace_dir 解析项目作用域。 -/// -/// 如果 cwd 位于 workspace_dir 的某个直接子目录下,返回该子目录作为作用域。 -/// 如果 cwd 等于 workspace_dir 或位于 workspace_dir 之外,返回 None(清理全部)。 -/// 移除 Windows UNC 前缀 \\?\,使路径可以与其他非 canonicalize 路径比较。 -fn strip_unc_prefix(path: &Path) -> PathBuf { - let s = path.to_string_lossy(); - if let Some(stripped) = s.strip_prefix(r"\\?\") { - PathBuf::from(stripped) - } else { - path.to_path_buf() - } -} - -fn resolve_project_scope(cwd: &Path, workspace_dir: &Path) -> Option { - // workspace_dir 可能经过 canonicalize 带有 Windows UNC 前缀,需要统一格式 - let cwd_norm = strip_unc_prefix(cwd); - let ws_norm = strip_unc_prefix(workspace_dir); - - let relative = cwd_norm.strip_prefix(&ws_norm).ok()?; - - // 如果 cwd 等于 workspace_dir,不限制作用域 - if relative.as_os_str().is_empty() { - return None; - } - - // 取 workspace_dir 的直接子目录作为作用域 - let first_component = relative.components().next()?; - Some(ws_norm.join(first_component.as_os_str())) -} - -/// 根据作用域过滤 CleanupSnapshot。 -/// -/// 保留规则: -/// - 位于作用域目录下的路径(项目内文件) -/// - 位于 workspace 之外的路径(全局文件) -/// 过滤掉: -/// - 位于 workspace 内但不在作用域下的路径(其他项目文件) fn filter_snapshot_by_scope( mut snapshot: CleanupSnapshot, scope: &Path, @@ -255,346 +246,14 @@ fn filter_snapshot_by_scope( snapshot } -fn build_global_scope(config: &UserConfigFile) -> Option { - let mut scope = serde_json::Map::new(); - - let mut os = serde_json::Map::new(); - os.insert("platform".to_string(), json!(std::env::consts::OS)); - os.insert("arch".to_string(), json!(std::env::consts::ARCH)); - os.insert("name".to_string(), json!(std::env::consts::OS)); - scope.insert("os".to_string(), Value::Object(os)); - - if let Some(profile) = config.profile.as_ref() { - let mut value = serde_json::Map::new(); - if let Some(name) = profile.name.as_ref() { - value.insert("name".to_string(), json!(name)); - } - if let Some(username) = profile.username.as_ref() { - value.insert("username".to_string(), json!(username)); - } - if let Some(gender) = profile.gender.as_ref() { - value.insert("gender".to_string(), json!(gender)); - } - if let Some(birthday) = profile.birthday.as_ref() { - value.insert("birthday".to_string(), json!(birthday)); - } - for (key, extra) in &profile.extra { - value.insert(key.clone(), extra.clone()); - } - if !value.is_empty() { - scope.insert("profile".to_string(), Value::Object(value)); - } - } - - if let Some(code_styles) = config.code_styles.as_ref() { - let mut value = serde_json::Map::new(); - if let Some(indent) = code_styles.indent { - value.insert( - "indent".to_string(), - json!(match indent { - config::CodeStyleIndent::Tab => "tab", - config::CodeStyleIndent::Space => "space", - }), - ); - } - if let Some(tab_size) = code_styles.tab_size { - value.insert("tabSize".to_string(), json!(tab_size)); - } - for (key, extra) in &code_styles.extra { - value.insert(key.clone(), extra.clone()); - } - if !value.is_empty() { - scope.insert("codeStyles".to_string(), Value::Object(value)); - } - } - - let mut tool = serde_json::Map::new(); - tool.insert("name".to_string(), json!("tnmsc")); - tool.insert("version".to_string(), json!(crate::version())); - scope.insert("tool".to_string(), Value::Object(tool)); - - (!scope.is_empty()).then(|| Value::Object(scope)) -} - -#[derive(Debug, Clone, Copy, Default)] -struct EnabledPlugins { - agents_md: bool, - claude_code: bool, - codex: bool, - cursor: bool, - droid: bool, - gemini: bool, - git: bool, - jetbrains: bool, - jetbrains_code_style: bool, - kiro: bool, - opencode: bool, - qoder: bool, - readme: bool, - trae: bool, - trae_cn: bool, - vscode: bool, - warp: bool, - windsurf: bool, - zed: bool, -} - -impl EnabledPlugins { - fn from_config(config: Option<&PluginsConfig>) -> Self { - Self { - agents_md: config.and_then(|value| value.agents_md).unwrap_or(true), - claude_code: config.and_then(|value| value.claude_code).unwrap_or(true), - codex: config.and_then(|value| value.codex).unwrap_or(false), - cursor: config.and_then(|value| value.cursor).unwrap_or(false), - droid: config.and_then(|value| value.droid).unwrap_or(false), - gemini: config.and_then(|value| value.gemini).unwrap_or(false), - git: config.and_then(|value| value.git).unwrap_or(true), - jetbrains: config.and_then(|value| value.jetbrains).unwrap_or(false), - jetbrains_code_style: config - .and_then(|value| value.jetbrains_code_style) - .unwrap_or(false), - kiro: config.and_then(|value| value.kiro).unwrap_or(false), - opencode: config.and_then(|value| value.opencode).unwrap_or(true), - qoder: config.and_then(|value| value.qoder).unwrap_or(false), - readme: config.and_then(|value| value.readme).unwrap_or(true), - trae: config.and_then(|value| value.trae).unwrap_or(false), - trae_cn: config.and_then(|value| value.trae_cn).unwrap_or(false), - vscode: config.and_then(|value| value.vscode).unwrap_or(false), - warp: config.and_then(|value| value.warp).unwrap_or(false), - windsurf: config.and_then(|value| value.windsurf).unwrap_or(false), - zed: config.and_then(|value| value.zed).unwrap_or(false), - } - } - - fn is_enabled(self, plugin_name: &str) -> bool { - match plugin_name { - "AgentsOutputAdaptor" => self.agents_md, - "GitExcludeOutputAdaptor" => self.git, - "JetBrainsIDECodeStyleConfigOutputAdaptor" => self.jetbrains_code_style, - "VisualStudioCodeIDEConfigOutputAdaptor" => self.vscode, - "ZedIDEConfigOutputAdaptor" => self.zed, - "ReadmeMdConfigFileOutputAdaptor" => self.readme, - "ClaudeCodeCLIOutputAdaptor" => self.claude_code, - "CodexCLIOutputAdaptor" => self.codex, - "CursorOutputAdaptor" => self.cursor, - "DroidCLIOutputAdaptor" => self.droid, - "GeminiCLIOutputAdaptor" => self.gemini, - "JetBrainsAIAssistantCodexOutputAdaptor" => self.jetbrains, - "KiroCLIOutputAdaptor" => self.kiro, - "OpencodeCLIOutputAdaptor" => self.opencode, - "QoderIDEPluginOutputAdaptor" => self.qoder, - "TraeOutputAdaptor" => self.trae || self.trae_cn, - "WarpIDEOutputAdaptor" => self.warp, - "WindsurfOutputAdaptor" => self.windsurf, - _ => false, - } - } -} - -fn collect_context( - workspace_dir: &str, - _global_scope: Option<&Value>, -) -> Result { - #[derive(Debug, serde::Deserialize)] - #[serde(rename_all = "camelCase")] - struct WorkspaceEnvelope { - workspace: crate::domain::plugin_shared::Workspace, - } - - #[derive(Debug, Default, serde::Deserialize)] - #[serde(rename_all = "camelCase")] - struct GlobalMemoryEnvelope { - #[serde(default)] - global_memory: Option, - } - - #[derive(Debug, Default, serde::Deserialize)] - #[serde(rename_all = "camelCase")] - struct CommandsEnvelope { - #[serde(default)] - commands: Vec, - } - - #[derive(Debug, Default, serde::Deserialize)] - #[serde(rename_all = "camelCase")] - struct SubAgentsEnvelope { - #[serde(default)] - sub_agents: Vec, - } - - #[derive(Debug, Default, serde::Deserialize)] - #[serde(rename_all = "camelCase")] - struct SkillsEnvelope { - #[serde(default)] - skills: Vec, - } - - #[derive(Debug, Default, serde::Deserialize)] - #[serde(rename_all = "camelCase")] - struct RulesEnvelope { - #[serde(default)] - rules: Vec, - } - - #[derive(Debug, Default, serde::Deserialize)] - #[serde(rename_all = "camelCase")] - struct ReadmeEnvelope { - #[serde(default)] - readme_prompts: Vec, - } - - #[derive(Debug, Default, serde::Deserialize)] - #[serde(rename_all = "camelCase")] - struct GitIgnoreEnvelope { - #[serde(default)] - global_git_ignore: Option, - } - - #[derive(Debug, Default, serde::Deserialize)] - #[serde(rename_all = "camelCase")] - struct GitExcludeEnvelope { - #[serde(default)] - shadow_git_exclude: Option, - } - - #[derive(Debug, Default, serde::Deserialize)] - #[serde(rename_all = "camelCase")] - struct SharedIgnoreEnvelope { - #[serde(default)] - ai_agent_ignore_config_files: - Option>, - } - - #[derive(Debug, Default, serde::Deserialize)] - #[serde(rename_all = "camelCase")] - struct VSCodeEnvelope { - #[serde(default)] - vscode_config_files: Option>, - } - - #[derive(Debug, Default, serde::Deserialize)] - #[serde(rename_all = "camelCase")] - struct ZedEnvelope { - #[serde(default)] - zed_config_files: Option>, - } - - #[derive(Debug, Default, serde::Deserialize)] - #[serde(rename_all = "camelCase")] - struct JetBrainsEnvelope { - #[serde(default)] - jetbrains_config_files: Option>, - } - - #[derive(Debug, Default, serde::Deserialize)] - #[serde(rename_all = "camelCase")] - struct EditorConfigEnvelope { - #[serde(default)] - editor_config_files: Option>, - } - - fn collect_json( - collector: impl Fn(&str) -> Result, - input: Value, - ) -> Result - where - T: serde::de::DeserializeOwned, - { - let raw = collector(&input.to_string())?; - serde_json::from_str(&raw).map_err(CliError::SerializationError) - } - - let aindex = collect_json::( - crate::repositories::aindex_resolvers::collect_aindex_resolvers, - json!({ "workspaceDir": workspace_dir }), - )?; - - let project_prompts = collect_json::( - crate::repositories::project_prompt::collect_project_prompt, - json!({ - "workspaceDir": workspace_dir, - "workspace": aindex.workspace, - "globalScope": None::, - }), - )?; - - let global_memory = collect_json::( - crate::repositories::global_memory::collect_global_memory, - json!({ "workspaceDir": workspace_dir, "globalScope": None:: }), - )?; - - let commands = collect_json::( - crate::repositories::command::collect_command, - json!({ "workspaceDir": workspace_dir, "globalScope": None:: }), - )?; - let sub_agents = collect_json::( - crate::repositories::subagent::collect_subagent, - json!({ "workspaceDir": workspace_dir, "globalScope": None:: }), - )?; - let skills = collect_json::( - crate::repositories::skill::collect_skill, - json!({ "workspaceDir": workspace_dir, "globalScope": None:: }), - )?; - let rules = collect_json::( - crate::repositories::rule::collect_rule, - json!({ "workspaceDir": workspace_dir, "globalScope": None:: }), - )?; - let readme = collect_json::( - crate::repositories::readme::collect_readme, - json!({ "workspaceDir": workspace_dir, "globalScope": None:: }), - )?; - let gitignore = collect_json::( - crate::repositories::gitignore::collect_gitignore, - json!({ "workspaceDir": workspace_dir }), - )?; - let git_exclude = collect_json::( - crate::repositories::git_exclude::collect_git_exclude, - json!({ "workspaceDir": workspace_dir }), - )?; - let shared_ignore = collect_json::( - crate::repositories::shared_ignore::collect_shared_ignore, - json!({ "workspaceDir": workspace_dir }), - )?; - let vscode = collect_json::( - crate::repositories::vscode_config::collect_vscode_config, - json!({ "workspaceDir": workspace_dir }), - )?; - let zed = collect_json::( - crate::repositories::zed_config::collect_zed_config, - json!({ "workspaceDir": workspace_dir }), - )?; - let jetbrains = collect_json::( - crate::repositories::jetbrains_config::collect_jetbrains_config, - json!({ "workspaceDir": workspace_dir }), - )?; - let editor_config = collect_json::( - crate::repositories::editorconfig::collect_editorconfig, - json!({ "workspaceDir": workspace_dir }), - )?; - - Ok(OutputContext { - workspace: Some(project_prompts.workspace), - vscode_config_files: vscode.vscode_config_files, - zed_config_files: zed.zed_config_files, - jetbrains_config_files: jetbrains.jetbrains_config_files, - editor_config_files: editor_config.editor_config_files, - fast_commands: (!commands.commands.is_empty()).then_some(commands.commands), - sub_agents: (!sub_agents.sub_agents.is_empty()).then_some(sub_agents.sub_agents), - skills: (!skills.skills.is_empty()).then_some(skills.skills), - rules: (!rules.rules.is_empty()).then_some(rules.rules), - global_memory: global_memory.global_memory, - global_git_ignore: gitignore.global_git_ignore, - shadow_git_exclude: git_exclude.shadow_git_exclude, - shadow_source_project_dir: None, - readme_prompts: (!readme.readme_prompts.is_empty()).then_some(readme.readme_prompts), - ai_agent_ignore_config_files: shared_ignore.ai_agent_ignore_config_files, - registered_output_plugins: None, - }) -} +// --------------------------------------------------------------------------- +// Output map building +// --------------------------------------------------------------------------- fn build_output_map( - context: &OutputContext, + context: &crate::context::OutputContext, enabled_plugins: EnabledPlugins, + logger: &Logger, ) -> Result< ( HashMap>, @@ -605,11 +264,9 @@ fn build_output_map( let mut output_map: HashMap> = HashMap::new(); let mut cleanup_map: HashMap = HashMap::new(); + let base_span = logger.span("output.build").enter(); let base_plans = crate::domain::base_output_plans::build_base_output_plans(context)?; for plan in &base_plans.plugins { - // Cleanup targets are always collected regardless of plugin enablement. - // This ensures `tnmsc clean` removes stale files even when a plugin - // has been disabled after previously being enabled. cleanup_map .entry(plan.plugin_name.clone()) .or_insert_with(CleanupDeclarationsDto::default) @@ -625,203 +282,88 @@ fn build_output_map( } } - if let Ok(plan) = - crate::domain::output_plans::claude_code_output_plan::build_claude_code_output_plan(context) - { - cleanup_map - .entry("ClaudeCodeCLIOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) - .delete - .extend(plan.cleanup.delete.clone()); + // Build plugin-specific output maps + if let Ok(plan) = crate::domain::output_plans::claude_code_output_plan::build_claude_code_output_plan(context) { + cleanup_map.entry("ClaudeCodeCLIOutputAdaptor".to_string()).or_insert_with(CleanupDeclarationsDto::default).delete.extend(plan.cleanup.delete.clone()); if enabled_plugins.claude_code { - for file in &plan.output_files { - output_map - .entry("ClaudeCodeCLIOutputAdaptor".to_string()) - .or_default() - .push(file.path.clone()); - } + for file in &plan.output_files { output_map.entry("ClaudeCodeCLIOutputAdaptor".to_string()).or_default().push(file.path.clone()); } } } - if let Ok(plan) = crate::domain::output_plans::codex_output_plan::build_codex_output_plan(context) - { - cleanup_map - .entry("CodexCLIOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) - .delete - .extend(plan.cleanup.delete.clone()); + if let Ok(plan) = crate::domain::output_plans::codex_output_plan::build_codex_output_plan(context) { + cleanup_map.entry("CodexCLIOutputAdaptor".to_string()).or_insert_with(CleanupDeclarationsDto::default).delete.extend(plan.cleanup.delete.clone()); if enabled_plugins.codex { - for file in &plan.output_files { - output_map - .entry("CodexCLIOutputAdaptor".to_string()) - .or_default() - .push(file.path.clone()); - } + for file in &plan.output_files { output_map.entry("CodexCLIOutputAdaptor".to_string()).or_default().push(file.path.clone()); } } } - if let Ok(plan) = - crate::domain::output_plans::cursor_output_plan::build_cursor_output_plan(context) - { - cleanup_map - .entry("CursorOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) - .delete - .extend(plan.cleanup.delete.clone()); + if let Ok(plan) = crate::domain::output_plans::cursor_output_plan::build_cursor_output_plan(context) { + cleanup_map.entry("CursorOutputAdaptor".to_string()).or_insert_with(CleanupDeclarationsDto::default).delete.extend(plan.cleanup.delete.clone()); if enabled_plugins.cursor { - for file in &plan.output_files { - output_map - .entry("CursorOutputAdaptor".to_string()) - .or_default() - .push(file.path.clone()); - } + for file in &plan.output_files { output_map.entry("CursorOutputAdaptor".to_string()).or_default().push(file.path.clone()); } } } - if let Ok(plan) = crate::domain::output_plans::droid_output_plan::build_droid_output_plan(context) - { - cleanup_map - .entry("DroidCLIOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) - .delete - .extend(plan.cleanup.delete.clone()); + if let Ok(plan) = crate::domain::output_plans::droid_output_plan::build_droid_output_plan(context) { + cleanup_map.entry("DroidCLIOutputAdaptor".to_string()).or_insert_with(CleanupDeclarationsDto::default).delete.extend(plan.cleanup.delete.clone()); if enabled_plugins.droid { - for file in &plan.output_files { - output_map - .entry("DroidCLIOutputAdaptor".to_string()) - .or_default() - .push(file.path.clone()); - } + for file in &plan.output_files { output_map.entry("DroidCLIOutputAdaptor".to_string()).or_default().push(file.path.clone()); } } } - if let Ok(plan) = - crate::domain::output_plans::gemini_output_plan::build_gemini_output_plan(context) - { - cleanup_map - .entry("GeminiCLIOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) - .delete - .extend(plan.cleanup.delete.clone()); + if let Ok(plan) = crate::domain::output_plans::gemini_output_plan::build_gemini_output_plan(context) { + cleanup_map.entry("GeminiCLIOutputAdaptor".to_string()).or_insert_with(CleanupDeclarationsDto::default).delete.extend(plan.cleanup.delete.clone()); if enabled_plugins.gemini { - for file in &plan.output_files { - output_map - .entry("GeminiCLIOutputAdaptor".to_string()) - .or_default() - .push(file.path.clone()); - } + for file in &plan.output_files { output_map.entry("GeminiCLIOutputAdaptor".to_string()).or_default().push(file.path.clone()); } } } if let Ok(plan) = crate::domain::output_plans::jetbrains_ai_assistant_codex_output_plan::build_jetbrains_ai_assistant_codex_output_plan(context) { - cleanup_map - .entry("JetBrainsAIAssistantCodexOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) - .delete - .extend(plan.cleanup.delete.clone()); + cleanup_map.entry("JetBrainsAIAssistantCodexOutputAdaptor".to_string()).or_insert_with(CleanupDeclarationsDto::default).delete.extend(plan.cleanup.delete.clone()); if enabled_plugins.jetbrains { - for file in &plan.output_files { - output_map - .entry("JetBrainsAIAssistantCodexOutputAdaptor".to_string()) - .or_default() - .push(file.path.clone()); - } + for file in &plan.output_files { output_map.entry("JetBrainsAIAssistantCodexOutputAdaptor".to_string()).or_default().push(file.path.clone()); } } } if let Ok(plan) = crate::domain::output_plans::kiro_output_plan::build_kiro_output_plan(context) { - cleanup_map - .entry("KiroCLIOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) - .delete - .extend(plan.cleanup.delete.clone()); + cleanup_map.entry("KiroCLIOutputAdaptor".to_string()).or_insert_with(CleanupDeclarationsDto::default).delete.extend(plan.cleanup.delete.clone()); if enabled_plugins.kiro { - for file in &plan.output_files { - output_map - .entry("KiroCLIOutputAdaptor".to_string()) - .or_default() - .push(file.path.clone()); - } + for file in &plan.output_files { output_map.entry("KiroCLIOutputAdaptor".to_string()).or_default().push(file.path.clone()); } } } - if let Ok(plan) = - crate::domain::output_plans::opencode_output_plan::build_opencode_output_plan(context) - { - cleanup_map - .entry("OpencodeCLIOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) - .delete - .extend(plan.cleanup.delete.clone()); + if let Ok(plan) = crate::domain::output_plans::opencode_output_plan::build_opencode_output_plan(context) { + cleanup_map.entry("OpencodeCLIOutputAdaptor".to_string()).or_insert_with(CleanupDeclarationsDto::default).delete.extend(plan.cleanup.delete.clone()); if enabled_plugins.opencode { - for file in &plan.output_files { - output_map - .entry("OpencodeCLIOutputAdaptor".to_string()) - .or_default() - .push(file.path.clone()); - } + for file in &plan.output_files { output_map.entry("OpencodeCLIOutputAdaptor".to_string()).or_default().push(file.path.clone()); } } } - if let Ok(plan) = crate::domain::output_plans::qoder_output_plan::build_qoder_output_plan(context) - { - cleanup_map - .entry("QoderIDEPluginOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) - .delete - .extend(plan.cleanup.delete.clone()); + if let Ok(plan) = crate::domain::output_plans::qoder_output_plan::build_qoder_output_plan(context) { + cleanup_map.entry("QoderIDEPluginOutputAdaptor".to_string()).or_insert_with(CleanupDeclarationsDto::default).delete.extend(plan.cleanup.delete.clone()); if enabled_plugins.qoder { - for file in &plan.output_files { - output_map - .entry("QoderIDEPluginOutputAdaptor".to_string()) - .or_default() - .push(file.path.clone()); - } + for file in &plan.output_files { output_map.entry("QoderIDEPluginOutputAdaptor".to_string()).or_default().push(file.path.clone()); } } } if let Ok(plan) = crate::domain::output_plans::trae_output_plan::build_trae_output_plan(context) { - cleanup_map - .entry("TraeOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) - .delete - .extend(plan.cleanup.delete.clone()); + cleanup_map.entry("TraeOutputAdaptor".to_string()).or_insert_with(CleanupDeclarationsDto::default).delete.extend(plan.cleanup.delete.clone()); if enabled_plugins.trae || enabled_plugins.trae_cn { - for file in &plan.output_files { - output_map - .entry("TraeOutputAdaptor".to_string()) - .or_default() - .push(file.path.clone()); - } + for file in &plan.output_files { output_map.entry("TraeOutputAdaptor".to_string()).or_default().push(file.path.clone()); } } } if let Ok(plan) = crate::domain::output_plans::warp_output_plan::build_warp_output_plan(context) { - cleanup_map - .entry("WarpIDEOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) - .delete - .extend(plan.cleanup.delete.clone()); + cleanup_map.entry("WarpIDEOutputAdaptor".to_string()).or_insert_with(CleanupDeclarationsDto::default).delete.extend(plan.cleanup.delete.clone()); if enabled_plugins.warp { - for file in &plan.output_files { - output_map - .entry("WarpIDEOutputAdaptor".to_string()) - .or_default() - .push(file.path.clone()); - } + for file in &plan.output_files { output_map.entry("WarpIDEOutputAdaptor".to_string()).or_default().push(file.path.clone()); } } } - if let Ok(plan) = - crate::domain::output_plans::windsurf_output_plan::build_windsurf_output_plan(context) - { - cleanup_map - .entry("WindsurfOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) - .delete - .extend(plan.cleanup.delete.clone()); + if let Ok(plan) = crate::domain::output_plans::windsurf_output_plan::build_windsurf_output_plan(context) { + cleanup_map.entry("WindsurfOutputAdaptor".to_string()).or_insert_with(CleanupDeclarationsDto::default).delete.extend(plan.cleanup.delete.clone()); if enabled_plugins.windsurf { - for file in &plan.output_files { - output_map - .entry("WindsurfOutputAdaptor".to_string()) - .or_default() - .push(file.path.clone()); - } + for file in &plan.output_files { output_map.entry("WindsurfOutputAdaptor".to_string()).or_default().push(file.path.clone()); } } } + base_span.exit(); Ok((output_map, cleanup_map)) } +// --------------------------------------------------------------------------- +// Cleanup snapshot +// --------------------------------------------------------------------------- + fn build_cleanup_snapshot( workspace_dir: &str, output_map: &HashMap>, @@ -829,8 +371,6 @@ fn build_cleanup_snapshot( ) -> Result { let mut plugin_snapshots = Vec::new(); - // Include all plugins that have either outputs or cleanup targets. - // This ensures disabled plugins still contribute their cleanup declarations. let mut all_plugin_names: Vec<&String> = output_map.keys().collect(); for name in cleanup_map.keys() { if !all_plugin_names.contains(&name) { @@ -859,7 +399,7 @@ fn build_cleanup_snapshot( let mut delete_targets = Vec::new(); for root_path in &project_roots { - let root = std::path::Path::new(root_path); + let root = Path::new(root_path); let agents_path = root.join("AGENTS.md"); let claude_path = root.join("CLAUDE.md"); let agt_path = root.join("agt.mdx"); @@ -915,7 +455,7 @@ fn build_cleanup_snapshot( } fn discover_project_roots(workspace_dir: &str) -> Vec { - let ws_path = std::path::Path::new(workspace_dir); + let ws_path = Path::new(workspace_dir); let mut roots = Vec::new(); if let Ok(entries) = std::fs::read_dir(ws_path) { @@ -937,12 +477,16 @@ fn discover_project_roots(workspace_dir: &str) -> Vec { roots } +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + #[cfg(test)] mod tests { use super::*; use tempfile::TempDir; - fn with_home_dir(home_dir: &Path, callback: impl FnOnce() -> T) -> T { + fn with_home_dir(home_dir: &std::path::Path, callback: impl FnOnce() -> T) -> T { let _guard = match crate::domain::TEST_ENV_LOCK.lock() { Ok(g) => g, Err(error) => error.into_inner(), @@ -967,7 +511,7 @@ mod tests { result } - fn create_test_config(home_dir: &Path, workspace_dir: &Path) -> std::io::Result<()> { + fn create_test_config(home_dir: &std::path::Path, workspace_dir: &std::path::Path) -> std::io::Result<()> { let config_content = json!({ "workspaceDir": workspace_dir.to_string_lossy() }); @@ -1061,7 +605,7 @@ mod tests { #[test] fn clean_enabled_plugins_from_empty_config() { - let plugins = EnabledPlugins::from_config(None); + let plugins = EnabledPlugins::from_config(None, DefaultPluginKind::Clean); assert!(plugins.agents_md); assert!(plugins.claude_code); assert!(plugins.git); @@ -1070,13 +614,13 @@ mod tests { #[test] fn clean_enabled_plugins_respects_config() { - let config = PluginsConfig { + let config = crate::domain::config::PluginsConfig { git: Some(false), readme: Some(false), claude_code: Some(true), ..Default::default() }; - let plugins = EnabledPlugins::from_config(Some(&config)); + let plugins = EnabledPlugins::from_config(Some(&config), DefaultPluginKind::Clean); assert!(!plugins.git); assert!(!plugins.readme); assert!(plugins.claude_code); @@ -1084,7 +628,7 @@ mod tests { #[test] fn clean_plugin_name_matching() { - let plugins = EnabledPlugins::from_config(None); + let plugins = EnabledPlugins::from_config(None, DefaultPluginKind::Clean); assert!(plugins.is_enabled("GitExcludeOutputAdaptor")); assert!(plugins.is_enabled("ReadmeMdConfigFileOutputAdaptor")); assert!(plugins.is_enabled("ClaudeCodeCLIOutputAdaptor")); @@ -1121,19 +665,10 @@ mod tests { assert!(snapshot.aindex_dir.unwrap().contains("aindex")); } - /// 回归测试:clean 必须始终收集所有插件的 cleanup targets,无论插件是否启用。 - /// - /// 设计原因:用户可能在禁用某个插件之前已经运行过 install,导致该插件生成的文件 - /// 仍然残留在项目或全局目录中。如果 clean 也跟随插件开关,则这些残留文件将永远 - /// 无法被自动清理,造成"清爽的编程上下文环境"被破坏。 - /// - /// 因此,install 行为受插件开关控制(只生成启用插件的文件),而 clean 行为 - /// 不受插件开关控制(总是清理所有已知插件的输出文件)。 #[test] fn clean_snapshot_includes_disabled_plugin_cleanup_targets() { let workspace_dir = "/tmp/test-workspace"; let output_map: HashMap> = HashMap::new(); - // 模拟 agents_md 被禁用:没有 outputs,但有 cleanup targets let mut cleanup_map: HashMap = HashMap::new(); cleanup_map.insert( "AgentsOutputAdaptor".to_string(), @@ -1173,7 +708,6 @@ mod tests { ); } - /// 回归测试:build_cleanup_snapshot 应同时包含 output_map 和 cleanup_map 中的插件。 #[test] fn clean_snapshot_collects_from_both_maps() { let workspace_dir = "/tmp/test-workspace"; @@ -1336,13 +870,14 @@ mod tests { fn clean_filter_snapshot_by_scope_filters_project_roots() { let temp_dir = TempDir::new().unwrap(); let ws = temp_dir.path(); - // 创建真实的项目目录,discover_project_roots 需要读取实际文件系统 std::fs::create_dir_all(ws.join("project-a")).unwrap(); std::fs::create_dir_all(ws.join("project-b")).unwrap(); let scope = ws.join("project-a"); - let snapshot = - build_cleanup_snapshot(&ws.to_string_lossy(), &HashMap::new(), &HashMap::new()).unwrap(); + let snapshot = build_cleanup_snapshot(&ws.to_string_lossy(), + &HashMap::new(), + &HashMap::new(), + ).unwrap(); let filtered = filter_snapshot_by_scope(snapshot, &scope, ws); diff --git a/sdk/src/services/command_diagnostics.rs b/sdk/src/services/command_diagnostics_service.rs similarity index 92% rename from sdk/src/services/command_diagnostics.rs rename to sdk/src/services/command_diagnostics_service.rs index 2a51c557..a9dcf328 100644 --- a/sdk/src/services/command_diagnostics.rs +++ b/sdk/src/services/command_diagnostics_service.rs @@ -1,8 +1,9 @@ -use std::path::{Path, PathBuf}; +use std::path::Path; use serde_json::{Value, json}; use crate::domain::config::MergedConfigResult; +use crate::services::common::strip_unc_prefix; pub(crate) fn build_workspace_mismatch_warning( cwd: &Path, @@ -42,15 +43,6 @@ fn normalize_display_path(path: &Path) -> String { strip_unc_prefix(path).to_string_lossy().into_owned() } -fn strip_unc_prefix(path: &Path) -> PathBuf { - let value = path.to_string_lossy(); - if let Some(stripped) = value.strip_prefix(r"\\?\") { - PathBuf::from(stripped) - } else { - path.to_path_buf() - } -} - #[cfg(test)] mod tests { use std::path::PathBuf; diff --git a/sdk/src/services/common.rs b/sdk/src/services/common.rs new file mode 100644 index 00000000..f9e23bc7 --- /dev/null +++ b/sdk/src/services/common.rs @@ -0,0 +1,521 @@ +use std::path::{Path, PathBuf}; + +use serde::de::DeserializeOwned; +use serde_json::{Value, json}; + +use crate::context::OutputContext; +use crate::domain::config::{self, ConfigLoader, PluginsConfig, UserConfigFile}; +use crate::infra::logger::Logger; +use crate::CliError; + +// --------------------------------------------------------------------------- +// Plugin defaults +// --------------------------------------------------------------------------- + +#[derive(Debug, Clone, Copy)] +pub enum DefaultPluginKind { + Install, + DryRun, + Clean, +} + +#[derive(Debug, Clone, Copy, Default)] +pub struct EnabledPlugins { + pub agents_md: bool, + pub claude_code: bool, + pub codex: bool, + pub cursor: bool, + pub droid: bool, + pub gemini: bool, + pub git: bool, + pub jetbrains: bool, + pub jetbrains_code_style: bool, + pub kiro: bool, + pub opencode: bool, + pub qoder: bool, + pub readme: bool, + pub trae: bool, + pub trae_cn: bool, + pub vscode: bool, + pub warp: bool, + pub windsurf: bool, + pub zed: bool, +} + +impl EnabledPlugins { + pub fn from_config(config: Option<&PluginsConfig>, kind: DefaultPluginKind) -> Self { + let (claude_default, opencode_default) = match kind { + DefaultPluginKind::DryRun => (false, false), + _ => (true, true), + }; + + Self { + agents_md: config.and_then(|v| v.agents_md).unwrap_or(true), + claude_code: config.and_then(|v| v.claude_code).unwrap_or(claude_default), + codex: config.and_then(|v| v.codex).unwrap_or(false), + cursor: config.and_then(|v| v.cursor).unwrap_or(false), + droid: config.and_then(|v| v.droid).unwrap_or(false), + gemini: config.and_then(|v| v.gemini).unwrap_or(false), + git: config.and_then(|v| v.git).unwrap_or(true), + jetbrains: config.and_then(|v| v.jetbrains).unwrap_or(false), + jetbrains_code_style: config.and_then(|v| v.jetbrains_code_style).unwrap_or(false), + kiro: config.and_then(|v| v.kiro).unwrap_or(false), + opencode: config.and_then(|v| v.opencode).unwrap_or(opencode_default), + qoder: config.and_then(|v| v.qoder).unwrap_or(false), + readme: config.and_then(|v| v.readme).unwrap_or(true), + trae: config.and_then(|v| v.trae).unwrap_or(false), + trae_cn: config.and_then(|v| v.trae_cn).unwrap_or(false), + vscode: config.and_then(|v| v.vscode).unwrap_or(false), + warp: config.and_then(|v| v.warp).unwrap_or(false), + windsurf: config.and_then(|v| v.windsurf).unwrap_or(false), + zed: config.and_then(|v| v.zed).unwrap_or(false), + } + } + + pub fn is_enabled(self, plugin_name: &str) -> bool { + match plugin_name { + "AgentsOutputAdaptor" => self.agents_md, + "GitExcludeOutputAdaptor" => self.git, + "JetBrainsIDECodeStyleConfigOutputAdaptor" => self.jetbrains_code_style, + "VisualStudioCodeIDEConfigOutputAdaptor" => self.vscode, + "ZedIDEConfigOutputAdaptor" => self.zed, + "ReadmeMdConfigFileOutputAdaptor" => self.readme, + "ClaudeCodeCLIOutputAdaptor" => self.claude_code, + "CodexCLIOutputAdaptor" => self.codex, + "CursorOutputAdaptor" => self.cursor, + "DroidCLIOutputAdaptor" => self.droid, + "GeminiCLIOutputAdaptor" => self.gemini, + "JetBrainsAIAssistantCodexOutputAdaptor" => self.jetbrains, + "KiroCLIOutputAdaptor" => self.kiro, + "OpencodeCLIOutputAdaptor" => self.opencode, + "QoderIDEPluginOutputAdaptor" => self.qoder, + "TraeOutputAdaptor" => self.trae || self.trae_cn, + "WarpIDEOutputAdaptor" => self.warp, + "WindsurfOutputAdaptor" => self.windsurf, + _ => false, + } + } + + pub fn registered_plugins(self) -> Vec { + let mut plugins = Vec::new(); + for plugin_name in [ + "AgentsOutputAdaptor", + "GitExcludeOutputAdaptor", + "JetBrainsIDECodeStyleConfigOutputAdaptor", + "VisualStudioCodeIDEConfigOutputAdaptor", + "ZedIDEConfigOutputAdaptor", + "ReadmeMdConfigFileOutputAdaptor", + "ClaudeCodeCLIOutputAdaptor", + "CodexCLIOutputAdaptor", + "CursorOutputAdaptor", + "DroidCLIOutputAdaptor", + "GeminiCLIOutputAdaptor", + "JetBrainsAIAssistantCodexOutputAdaptor", + "KiroCLIOutputAdaptor", + "OpencodeCLIOutputAdaptor", + "QoderIDEPluginOutputAdaptor", + "TraeOutputAdaptor", + "WarpIDEOutputAdaptor", + "WindsurfOutputAdaptor", + ] { + if self.is_enabled(plugin_name) { + plugins.push(plugin_name.to_string()); + } + } + plugins + } +} + +// --------------------------------------------------------------------------- +// Envelopes for JSON deserialization +// --------------------------------------------------------------------------- + +#[derive(Debug, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct WorkspaceEnvelope { + pub workspace: crate::domain::plugin_shared::Workspace, +} + +#[derive(Debug, Default, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct GlobalMemoryEnvelope { + #[serde(default)] + pub global_memory: Option, +} + +#[derive(Debug, Default, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CommandsEnvelope { + #[serde(default)] + pub commands: Vec, +} + +#[derive(Debug, Default, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SubAgentsEnvelope { + #[serde(default)] + pub sub_agents: Vec, +} + +#[derive(Debug, Default, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SkillsEnvelope { + #[serde(default)] + pub skills: Vec, +} + +#[derive(Debug, Default, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct RulesEnvelope { + #[serde(default)] + pub rules: Vec, +} + +#[derive(Debug, Default, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ReadmeEnvelope { + #[serde(default)] + pub readme_prompts: Vec, +} + +#[derive(Debug, Default, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct GitIgnoreEnvelope { + #[serde(default)] + pub global_git_ignore: Option, +} + +#[derive(Debug, Default, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct GitExcludeEnvelope { + #[serde(default)] + pub shadow_git_exclude: Option, +} + +#[derive(Debug, Default, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SharedIgnoreEnvelope { + #[serde(default)] + pub ai_agent_ignore_config_files: + Option>, +} + +#[derive(Debug, Default, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct VSCodeEnvelope { + #[serde(default)] + pub vscode_config_files: Option>, +} + +#[derive(Debug, Default, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ZedEnvelope { + #[serde(default)] + pub zed_config_files: Option>, +} + +#[derive(Debug, Default, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct JetBrainsEnvelope { + #[serde(default)] + pub jetbrains_config_files: Option>, +} + +#[derive(Debug, Default, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct EditorConfigEnvelope { + #[serde(default)] + pub editor_config_files: Option>, +} + +// --------------------------------------------------------------------------- +// Config helpers +// --------------------------------------------------------------------------- + +pub fn resolve_cwd(cwd: Option<&str>) -> Result { + match cwd { + Some(value) => Ok(config::resolve_workspace_dir(value)), + None => std::env::current_dir().map_err(CliError::IoError), + } +} + +pub fn load_config( + cwd: &Path, + load_user_config: Option, +) -> Result { + if load_user_config == Some(false) { + return Ok(config::MergedConfigResult { + config: UserConfigFile::default(), + sources: vec![], + found: false, + }); + } + + let result = ConfigLoader::with_defaults() + .try_load(cwd) + .map_err(CliError::ConfigError)?; + + if !result.found { + let config_path = config::get_required_global_config_path() + .unwrap_or_else(|_| config::get_global_config_path()); + return Err(CliError::ConfigError(format!( + "Required config file not found at {}. Please create it before running tnmsc.", + config_path.display() + ))); + } + + Ok(result) +} + +pub fn resolve_workspace_dir(_cwd: &Path, config: &UserConfigFile) -> Result { + match config.workspace_dir.as_deref() { + Some(dir) => Ok(config::resolve_workspace_dir(dir)), + None => Err(CliError::ConfigError( + "workspaceDir is required but was not configured. Please set workspaceDir in your .tnmsc.json config file.".to_string(), + )), + } +} + +pub fn build_global_scope(config: &UserConfigFile) -> Option { + let mut scope = serde_json::Map::new(); + + let mut os = serde_json::Map::new(); + os.insert("platform".to_string(), json!(std::env::consts::OS)); + os.insert("arch".to_string(), json!(std::env::consts::ARCH)); + os.insert("name".to_string(), json!(std::env::consts::OS)); + scope.insert("os".to_string(), Value::Object(os)); + + if let Some(profile) = config.profile.as_ref() { + let mut value = serde_json::Map::new(); + if let Some(name) = profile.name.as_ref() { + value.insert("name".to_string(), json!(name)); + } + if let Some(username) = profile.username.as_ref() { + value.insert("username".to_string(), json!(username)); + } + if let Some(gender) = profile.gender.as_ref() { + value.insert("gender".to_string(), json!(gender)); + } + if let Some(birthday) = profile.birthday.as_ref() { + value.insert("birthday".to_string(), json!(birthday)); + } + for (key, extra) in &profile.extra { + value.insert(key.clone(), extra.clone()); + } + if !value.is_empty() { + scope.insert("profile".to_string(), Value::Object(value)); + } + } + + if let Some(code_styles) = config.code_styles.as_ref() { + let mut value = serde_json::Map::new(); + if let Some(indent) = code_styles.indent { + value.insert( + "indent".to_string(), + json!(match indent { + config::CodeStyleIndent::Tab => "tab", + config::CodeStyleIndent::Space => "space", + }), + ); + } + if let Some(tab_size) = code_styles.tab_size { + value.insert("tabSize".to_string(), json!(tab_size)); + } + for (key, extra) in &code_styles.extra { + value.insert(key.clone(), extra.clone()); + } + if !value.is_empty() { + scope.insert("codeStyles".to_string(), Value::Object(value)); + } + } + + let mut tool = serde_json::Map::new(); + tool.insert("name".to_string(), json!("tnmsc")); + tool.insert("version".to_string(), json!(crate::version())); + scope.insert("tool".to_string(), Value::Object(tool)); + + (!scope.is_empty()).then(|| Value::Object(scope)) +} + +pub fn strip_unc_prefix(path: &Path) -> PathBuf { + let s = path.to_string_lossy(); + if let Some(stripped) = s.strip_prefix(r"\\?\") { + PathBuf::from(stripped) + } else { + path.to_path_buf() + } +} + +pub fn count_missing_directories(dir: &Path) -> usize { + let mut missing = Vec::new(); + let mut current = Some(dir); + + while let Some(path) = current { + if path.exists() { + break; + } + missing.push(path.to_path_buf()); + current = path.parent(); + } + + missing.len() +} + +// --------------------------------------------------------------------------- +// JSON collection helpers +// --------------------------------------------------------------------------- + +pub fn collect_json( + collector: impl Fn(&str) -> Result, + input: Value, +) -> Result +where + T: DeserializeOwned, +{ + let raw = collector(&input.to_string())?; + serde_json::from_str(&raw).map_err(CliError::SerializationError) +} + +// --------------------------------------------------------------------------- +// Context collection +// --------------------------------------------------------------------------- + +pub fn collect_context( + workspace_dir: &str, + global_scope: Option<&Value>, + enabled_plugins: &EnabledPlugins, + logger: &Logger, +) -> Result { + + let aindex = { + let _span = logger.span("collect.aindex_resolvers").enter(); + collect_json::( + crate::repositories::aindex_resolvers::collect_aindex_resolvers, + json!({ "workspaceDir": workspace_dir }), + )? + }; + + let project_prompts = { + let _span = logger.span("collect.project_prompt").enter(); + collect_json::( + crate::repositories::project_prompt::collect_project_prompt, + json!({ + "workspaceDir": workspace_dir, + "workspace": aindex.workspace, + "globalScope": global_scope, + }), + )? + }; + + let global_memory = { + let _span = logger.span("collect.global_memory").enter(); + collect_json::( + crate::repositories::global_memory::collect_global_memory, + json!({ "workspaceDir": workspace_dir, "globalScope": global_scope }), + )? + }; + + let commands = { + let _span = logger.span("collect.command").enter(); + collect_json::( + crate::repositories::command::collect_command, + json!({ "workspaceDir": workspace_dir, "globalScope": global_scope }), + )? + }; + let sub_agents = { + let _span = logger.span("collect.subagent").enter(); + collect_json::( + crate::repositories::subagent::collect_subagent, + json!({ "workspaceDir": workspace_dir, "globalScope": global_scope }), + )? + }; + let skills = { + let _span = logger.span("collect.skill").enter(); + collect_json::( + crate::repositories::skill::collect_skill, + json!({ "workspaceDir": workspace_dir, "globalScope": global_scope }), + )? + }; + let rules = { + let _span = logger.span("collect.rule").enter(); + collect_json::( + crate::repositories::rule::collect_rule, + json!({ "workspaceDir": workspace_dir, "globalScope": global_scope }), + )? + }; + let readme = { + let _span = logger.span("collect.readme").enter(); + collect_json::( + crate::repositories::readme::collect_readme, + json!({ "workspaceDir": workspace_dir, "globalScope": global_scope }), + )? + }; + let gitignore = { + let _span = logger.span("collect.gitignore").enter(); + collect_json::( + crate::repositories::gitignore::collect_gitignore, + json!({ "workspaceDir": workspace_dir }), + )? + }; + let git_exclude = { + let _span = logger.span("collect.git_exclude").enter(); + collect_json::( + crate::repositories::git_exclude::collect_git_exclude, + json!({ "workspaceDir": workspace_dir }), + )? + }; + let shared_ignore = { + let _span = logger.span("collect.shared_ignore").enter(); + collect_json::( + crate::repositories::shared_ignore::collect_shared_ignore, + json!({ "workspaceDir": workspace_dir }), + )? + }; + let vscode = { + let _span = logger.span("collect.vscode_config").enter(); + collect_json::( + crate::repositories::vscode_config::collect_vscode_config, + json!({ "workspaceDir": workspace_dir }), + )? + }; + let zed = { + let _span = logger.span("collect.zed_config").enter(); + collect_json::( + crate::repositories::zed_config::collect_zed_config, + json!({ "workspaceDir": workspace_dir }), + )? + }; + let jetbrains = { + let _span = logger.span("collect.jetbrains_config").enter(); + collect_json::( + crate::repositories::jetbrains_config::collect_jetbrains_config, + json!({ "workspaceDir": workspace_dir }), + )? + }; + let editor_config = { + let _span = logger.span("collect.editorconfig").enter(); + collect_json::( + crate::repositories::editorconfig::collect_editorconfig, + json!({ "workspaceDir": workspace_dir }), + )? + }; + + Ok(OutputContext { + workspace: Some(project_prompts.workspace), + vscode_config_files: vscode.vscode_config_files, + zed_config_files: zed.zed_config_files, + jetbrains_config_files: jetbrains.jetbrains_config_files, + editor_config_files: editor_config.editor_config_files, + fast_commands: (!commands.commands.is_empty()).then_some(commands.commands), + sub_agents: (!sub_agents.sub_agents.is_empty()).then_some(sub_agents.sub_agents), + skills: (!skills.skills.is_empty()).then_some(skills.skills), + rules: (!rules.rules.is_empty()).then_some(rules.rules), + global_memory: global_memory.global_memory, + global_git_ignore: gitignore.global_git_ignore, + shadow_git_exclude: git_exclude.shadow_git_exclude, + shadow_source_project_dir: None, + readme_prompts: (!readme.readme_prompts.is_empty()).then_some(readme.readme_prompts), + ai_agent_ignore_config_files: shared_ignore.ai_agent_ignore_config_files, + registered_output_plugins: Some(enabled_plugins.registered_plugins()), + }) +} diff --git a/sdk/src/services/dry_run_service.rs b/sdk/src/services/dry_run_service.rs index 5faf1436..6ac0777b 100644 --- a/sdk/src/services/dry_run_service.rs +++ b/sdk/src/services/dry_run_service.rs @@ -1,28 +1,70 @@ use std::collections::BTreeMap; -use std::path::{Path, PathBuf}; +use std::path::Path; -use serde_json::{Value, json}; +use serde_json::json; -use crate::context::OutputContext; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPlansDto}; -use crate::domain::config::{self, ConfigLoader, PluginsConfig, UserConfigFile}; use crate::domain::output_plans::droid_output_plan::DroidOutputPlanDto; -use crate::services::command_diagnostics::build_workspace_mismatch_warning; +use crate::infra::logger::create_logger; +use crate::services::command_diagnostics_service::build_workspace_mismatch_warning; +use crate::services::common::{ + DefaultPluginKind, EnabledPlugins, collect_context, load_config, resolve_cwd, + resolve_workspace_dir, +}; use crate::{CliError, MemorySyncCommandOptions, MemorySyncCommandResult}; +#[derive(Debug, Clone)] +#[allow(dead_code)] +struct PlannedOutputFile { + path: String, + content: String, + encoding: Option, +} + pub fn dry_run(options: MemorySyncCommandOptions) -> Result { + let logger = create_logger("dry_run", options.log_level.as_deref().and_then(|s| crate::infra::logger::LogLevel::from_str_loose(s))); + let _span = logger.span("command.dry_run").enter(); + + logger.info("Dry run started", None); + let cwd = resolve_cwd(options.cwd.as_deref())?; + + let config_span = logger.span("config.load").enter(); let config_result = load_config(&cwd, options.load_user_config)?; + config_span.exit(); + let workspace_dir = resolve_workspace_dir(&cwd, &config_result.config)?; let warnings = build_workspace_mismatch_warning(&cwd, &workspace_dir, &config_result) .into_iter() .collect(); let workspace_dir_str = workspace_dir.to_string_lossy().into_owned(); - let global_scope = build_global_scope(&config_result.config); - let enabled_plugins = EnabledPlugins::from_config(config_result.config.plugins.as_ref()); - let context = collect_context(&workspace_dir_str, global_scope.as_ref())?; + logger.info("Config loaded", Some(json!({ + "workspaceDir": &workspace_dir_str, + "configFound": config_result.found, + }))); + + let global_scope = crate::services::common::build_global_scope(&config_result.config); + let enabled_plugins = EnabledPlugins::from_config(config_result.config.plugins.as_ref(), DefaultPluginKind::DryRun); + + logger.info("Plugins resolved", Some(json!({ + "enabled": enabled_plugins.registered_plugins(), + }))); + + let context_span = logger.span("context.collect").enter(); + let context = collect_context(&workspace_dir_str, global_scope.as_ref(), &enabled_plugins, &logger)?; + context_span.exit(); + + logger.info("Context collected", Some(json!({ + "globalMemory": context.global_memory.is_some(), + "commands": context.fast_commands.as_ref().map(|v| v.len()), + "skills": context.skills.as_ref().map(|v| v.len()), + "rules": context.rules.as_ref().map(|v| v.len()), + }))); + + let output_span = logger.span("output.build").enter(); let planned_outputs = build_output_files(&context, enabled_plugins)?; + output_span.exit(); let mut files_affected = 0usize; let mut dirs_affected = 0usize; @@ -31,12 +73,17 @@ pub fn dry_run(options: MemorySyncCommandOptions) -> Result Result) -> Result { - match cwd { - Some(value) => Ok(config::resolve_workspace_dir(value)), - None => std::env::current_dir().map_err(CliError::IoError), - } -} - -fn load_config( - cwd: &Path, - load_user_config: Option, -) -> Result { - if load_user_config == Some(false) { - return Ok(config::MergedConfigResult { - config: UserConfigFile::default(), - sources: vec![], - found: false, - }); - } - - let result = ConfigLoader::with_defaults() - .try_load(cwd) - .map_err(CliError::ConfigError)?; - - if !result.found { - let config_path = config::get_required_global_config_path() - .unwrap_or_else(|_| config::get_global_config_path()); - return Err(CliError::ConfigError(format!( - "Required config file not found at {}. Please create it before running tnmsc.", - config_path.display() - ))); - } - - Ok(result) -} - -fn resolve_workspace_dir(_cwd: &Path, config: &UserConfigFile) -> Result { - match config.workspace_dir.as_deref() { - Some(dir) => Ok(config::resolve_workspace_dir(dir)), - None => Err(CliError::ConfigError( - "workspaceDir is required but was not configured. Please set workspaceDir in your .tnmsc.json config file.".to_string(), - )), - } -} - -fn build_global_scope(config: &UserConfigFile) -> Option { - let mut scope = serde_json::Map::new(); - - let mut os = serde_json::Map::new(); - os.insert("platform".to_string(), json!(std::env::consts::OS)); - os.insert("arch".to_string(), json!(std::env::consts::ARCH)); - os.insert("name".to_string(), json!(std::env::consts::OS)); - scope.insert("os".to_string(), Value::Object(os)); - - if let Some(profile) = config.profile.as_ref() { - let mut value = serde_json::Map::new(); - if let Some(name) = profile.name.as_ref() { - value.insert("name".to_string(), json!(name)); - } - if let Some(username) = profile.username.as_ref() { - value.insert("username".to_string(), json!(username)); - } - if let Some(gender) = profile.gender.as_ref() { - value.insert("gender".to_string(), json!(gender)); - } - if let Some(birthday) = profile.birthday.as_ref() { - value.insert("birthday".to_string(), json!(birthday)); - } - for (key, extra) in &profile.extra { - value.insert(key.clone(), extra.clone()); - } - if !value.is_empty() { - scope.insert("profile".to_string(), Value::Object(value)); - } - } - - if let Some(code_styles) = config.code_styles.as_ref() { - let mut value = serde_json::Map::new(); - if let Some(indent) = code_styles.indent { - value.insert( - "indent".to_string(), - json!(match indent { - config::CodeStyleIndent::Tab => "tab", - config::CodeStyleIndent::Space => "space", - }), - ); - } - if let Some(tab_size) = code_styles.tab_size { - value.insert("tabSize".to_string(), json!(tab_size)); - } - for (key, extra) in &code_styles.extra { - value.insert(key.clone(), extra.clone()); - } - if !value.is_empty() { - scope.insert("codeStyles".to_string(), Value::Object(value)); - } - } - - let mut tool = serde_json::Map::new(); - tool.insert("name".to_string(), json!("tnmsc")); - tool.insert("version".to_string(), json!(crate::version())); - scope.insert("tool".to_string(), Value::Object(tool)); - - (!scope.is_empty()).then(|| Value::Object(scope)) -} - -#[derive(Debug, Clone, Copy, Default)] -struct EnabledPlugins { - agents_md: bool, - claude_code: bool, - codex: bool, - cursor: bool, - droid: bool, - gemini: bool, - git: bool, - jetbrains: bool, - jetbrains_code_style: bool, - kiro: bool, - opencode: bool, - qoder: bool, - readme: bool, - trae: bool, - trae_cn: bool, - vscode: bool, - warp: bool, - windsurf: bool, - zed: bool, -} - -impl EnabledPlugins { - fn from_config(config: Option<&PluginsConfig>) -> Self { - Self { - agents_md: config.and_then(|value| value.agents_md).unwrap_or(true), - claude_code: config.and_then(|value| value.claude_code).unwrap_or(false), - codex: config.and_then(|value| value.codex).unwrap_or(false), - cursor: config.and_then(|value| value.cursor).unwrap_or(false), - droid: config.and_then(|value| value.droid).unwrap_or(false), - gemini: config.and_then(|value| value.gemini).unwrap_or(false), - git: config.and_then(|value| value.git).unwrap_or(true), - jetbrains: config.and_then(|value| value.jetbrains).unwrap_or(false), - jetbrains_code_style: config - .and_then(|value| value.jetbrains_code_style) - .unwrap_or(false), - kiro: config.and_then(|value| value.kiro).unwrap_or(false), - opencode: config.and_then(|value| value.opencode).unwrap_or(false), - qoder: config.and_then(|value| value.qoder).unwrap_or(false), - readme: config.and_then(|value| value.readme).unwrap_or(true), - trae: config.and_then(|value| value.trae).unwrap_or(false), - trae_cn: config.and_then(|value| value.trae_cn).unwrap_or(false), - vscode: config.and_then(|value| value.vscode).unwrap_or(false), - warp: config.and_then(|value| value.warp).unwrap_or(false), - windsurf: config.and_then(|value| value.windsurf).unwrap_or(false), - zed: config.and_then(|value| value.zed).unwrap_or(false), - } - } - - fn is_enabled(self, plugin_name: &str) -> bool { - match plugin_name { - "AgentsOutputAdaptor" => self.agents_md, - "GitExcludeOutputAdaptor" => self.git, - "JetBrainsIDECodeStyleConfigOutputAdaptor" => self.jetbrains_code_style, - "VisualStudioCodeIDEConfigOutputAdaptor" => self.vscode, - "ZedIDEConfigOutputAdaptor" => self.zed, - "ReadmeMdConfigFileOutputAdaptor" => self.readme, - "ClaudeCodeCLIOutputAdaptor" => self.claude_code, - "CodexCLIOutputAdaptor" => self.codex, - "CursorOutputAdaptor" => self.cursor, - "DroidCLIOutputAdaptor" => self.droid, - "GeminiCLIOutputAdaptor" => self.gemini, - "JetBrainsAIAssistantCodexOutputAdaptor" => self.jetbrains, - "KiroCLIOutputAdaptor" => self.kiro, - "OpencodeCLIOutputAdaptor" => self.opencode, - "QoderIDEPluginOutputAdaptor" => self.qoder, - "TraeOutputAdaptor" => self.trae || self.trae_cn, - "WarpIDEOutputAdaptor" => self.warp, - "WindsurfOutputAdaptor" => self.windsurf, - _ => false, - } - } -} - -#[derive(Debug, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -struct WorkspaceEnvelope { - workspace: crate::domain::plugin_shared::Workspace, -} - -#[derive(Debug, Default, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -struct GlobalMemoryEnvelope { - #[serde(default)] - global_memory: Option, -} - -#[derive(Debug, Default, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -struct CommandsEnvelope { - #[serde(default)] - commands: Vec, -} - -#[derive(Debug, Default, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -struct SubAgentsEnvelope { - #[serde(default)] - sub_agents: Vec, -} - -#[derive(Debug, Default, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -struct SkillsEnvelope { - #[serde(default)] - skills: Vec, -} - -#[derive(Debug, Default, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -struct RulesEnvelope { - #[serde(default)] - rules: Vec, -} - -#[derive(Debug, Default, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -struct ReadmeEnvelope { - #[serde(default)] - readme_prompts: Vec, -} - -#[derive(Debug, Default, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -struct GitIgnoreEnvelope { - #[serde(default)] - global_git_ignore: Option, -} - -#[derive(Debug, Default, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -struct GitExcludeEnvelope { - #[serde(default)] - shadow_git_exclude: Option, -} - -#[derive(Debug, Default, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -struct SharedIgnoreEnvelope { - #[serde(default)] - ai_agent_ignore_config_files: Option>, -} - -#[derive(Debug, Default, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -struct VSCodeEnvelope { - #[serde(default)] - vscode_config_files: Option>, -} - -#[derive(Debug, Default, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -struct ZedEnvelope { - #[serde(default)] - zed_config_files: Option>, -} - -#[derive(Debug, Default, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -struct JetBrainsEnvelope { - #[serde(default)] - jetbrains_config_files: Option>, -} - -#[derive(Debug, Default, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -struct EditorConfigEnvelope { - #[serde(default)] - editor_config_files: Option>, -} - -#[derive(Debug, Clone)] -#[allow(dead_code)] -struct PlannedOutputFile { - path: String, - content: String, - encoding: Option, -} - -fn collect_context( - workspace_dir: &str, - _global_scope: Option<&Value>, -) -> Result { - fn collect_json( - collector: impl Fn(&str) -> Result, - input: Value, - ) -> Result - where - T: serde::de::DeserializeOwned, - { - let raw = collector(&input.to_string())?; - serde_json::from_str(&raw).map_err(CliError::SerializationError) - } - - let aindex = collect_json::( - crate::repositories::aindex_resolvers::collect_aindex_resolvers, - json!({ "workspaceDir": workspace_dir }), - )?; - - let project_prompts = collect_json::( - crate::repositories::project_prompt::collect_project_prompt, - json!({ - "workspaceDir": workspace_dir, - "workspace": aindex.workspace, - "globalScope": None::, - }), - )?; - - let global_memory = collect_json::( - crate::repositories::global_memory::collect_global_memory, - json!({ "workspaceDir": workspace_dir, "globalScope": None:: }), - )?; - - let commands = collect_json::( - crate::repositories::command::collect_command, - json!({ "workspaceDir": workspace_dir, "globalScope": None:: }), - )?; - let sub_agents = collect_json::( - crate::repositories::subagent::collect_subagent, - json!({ "workspaceDir": workspace_dir, "globalScope": None:: }), - )?; - let skills = collect_json::( - crate::repositories::skill::collect_skill, - json!({ "workspaceDir": workspace_dir, "globalScope": None:: }), - )?; - let rules = collect_json::( - crate::repositories::rule::collect_rule, - json!({ "workspaceDir": workspace_dir, "globalScope": None:: }), - )?; - let readme = collect_json::( - crate::repositories::readme::collect_readme, - json!({ "workspaceDir": workspace_dir, "globalScope": None:: }), - )?; - let gitignore = collect_json::( - crate::repositories::gitignore::collect_gitignore, - json!({ "workspaceDir": workspace_dir }), - )?; - let git_exclude = collect_json::( - crate::repositories::git_exclude::collect_git_exclude, - json!({ "workspaceDir": workspace_dir }), - )?; - let shared_ignore = collect_json::( - crate::repositories::shared_ignore::collect_shared_ignore, - json!({ "workspaceDir": workspace_dir }), - )?; - let vscode = collect_json::( - crate::repositories::vscode_config::collect_vscode_config, - json!({ "workspaceDir": workspace_dir }), - )?; - let zed = collect_json::( - crate::repositories::zed_config::collect_zed_config, - json!({ "workspaceDir": workspace_dir }), - )?; - let jetbrains = collect_json::( - crate::repositories::jetbrains_config::collect_jetbrains_config, - json!({ "workspaceDir": workspace_dir }), - )?; - let editor_config = collect_json::( - crate::repositories::editorconfig::collect_editorconfig, - json!({ "workspaceDir": workspace_dir }), - )?; - - Ok(OutputContext { - workspace: Some(project_prompts.workspace), - vscode_config_files: vscode.vscode_config_files, - zed_config_files: zed.zed_config_files, - jetbrains_config_files: jetbrains.jetbrains_config_files, - editor_config_files: editor_config.editor_config_files, - fast_commands: (!commands.commands.is_empty()).then_some(commands.commands), - sub_agents: (!sub_agents.sub_agents.is_empty()).then_some(sub_agents.sub_agents), - skills: (!skills.skills.is_empty()).then_some(skills.skills), - rules: (!rules.rules.is_empty()).then_some(rules.rules), - global_memory: global_memory.global_memory, - global_git_ignore: gitignore.global_git_ignore, - shadow_git_exclude: git_exclude.shadow_git_exclude, - shadow_source_project_dir: None, - readme_prompts: (!readme.readme_prompts.is_empty()).then_some(readme.readme_prompts), - ai_agent_ignore_config_files: shared_ignore.ai_agent_ignore_config_files, - registered_output_plugins: None, - }) -} - fn build_output_files( - context: &OutputContext, + context: &crate::context::OutputContext, enabled_plugins: EnabledPlugins, ) -> Result, CliError> { let mut outputs = BTreeMap::new(); @@ -457,83 +116,52 @@ fn build_output_files( push_base_plans(&mut outputs, &base_plans, enabled_plugins); if enabled_plugins.claude_code { - if let Ok(plan) = - crate::domain::output_plans::claude_code_output_plan::build_claude_code_output_plan(context) - { - push_base_output_files(&mut outputs, &plan.output_files); - } + let plan = crate::domain::output_plans::claude_code_output_plan::build_claude_code_output_plan(context)?; + push_base_output_files(&mut outputs, &plan.output_files); } if enabled_plugins.codex { - if let Ok(plan) = - crate::domain::output_plans::codex_output_plan::build_codex_output_plan(context) - { - push_base_output_files(&mut outputs, &plan.output_files); - } + let plan = crate::domain::output_plans::codex_output_plan::build_codex_output_plan(context)?; + push_base_output_files(&mut outputs, &plan.output_files); } if enabled_plugins.cursor { - if let Ok(plan) = - crate::domain::output_plans::cursor_output_plan::build_cursor_output_plan(context) - { - push_base_output_files(&mut outputs, &plan.output_files); - } + let plan = crate::domain::output_plans::cursor_output_plan::build_cursor_output_plan(context)?; + push_base_output_files(&mut outputs, &plan.output_files); } if enabled_plugins.droid { - if let Ok(plan) = - crate::domain::output_plans::droid_output_plan::build_droid_output_plan(context) - { - push_droid_output_files(&mut outputs, &plan); - } + let plan = crate::domain::output_plans::droid_output_plan::build_droid_output_plan(context)?; + push_droid_output_files(&mut outputs, &plan); } if enabled_plugins.gemini { - if let Ok(plan) = - crate::domain::output_plans::gemini_output_plan::build_gemini_output_plan(context) - { - push_base_output_files(&mut outputs, &plan.output_files); - } + let plan = crate::domain::output_plans::gemini_output_plan::build_gemini_output_plan(context)?; + push_base_output_files(&mut outputs, &plan.output_files); } if enabled_plugins.jetbrains { - if let Ok(plan) = crate::domain::output_plans::jetbrains_ai_assistant_codex_output_plan::build_jetbrains_ai_assistant_codex_output_plan(context) { - push_base_output_files(&mut outputs, &plan.output_files); - } + let plan = crate::domain::output_plans::jetbrains_ai_assistant_codex_output_plan::build_jetbrains_ai_assistant_codex_output_plan(context)?; + push_base_output_files(&mut outputs, &plan.output_files); } if enabled_plugins.kiro { - if let Ok(plan) = crate::domain::output_plans::kiro_output_plan::build_kiro_output_plan(context) - { - push_base_output_files(&mut outputs, &plan.output_files); - } + let plan = crate::domain::output_plans::kiro_output_plan::build_kiro_output_plan(context)?; + push_base_output_files(&mut outputs, &plan.output_files); } if enabled_plugins.opencode { - if let Ok(plan) = - crate::domain::output_plans::opencode_output_plan::build_opencode_output_plan(context) - { - push_base_output_files(&mut outputs, &plan.output_files); - } + let plan = crate::domain::output_plans::opencode_output_plan::build_opencode_output_plan(context)?; + push_base_output_files(&mut outputs, &plan.output_files); } if enabled_plugins.qoder { - if let Ok(plan) = - crate::domain::output_plans::qoder_output_plan::build_qoder_output_plan(context) - { - push_base_output_files(&mut outputs, &plan.output_files); - } + let plan = crate::domain::output_plans::qoder_output_plan::build_qoder_output_plan(context)?; + push_base_output_files(&mut outputs, &plan.output_files); } if enabled_plugins.trae || enabled_plugins.trae_cn { - if let Ok(plan) = crate::domain::output_plans::trae_output_plan::build_trae_output_plan(context) - { - push_base_output_files(&mut outputs, &plan.output_files); - } + let plan = crate::domain::output_plans::trae_output_plan::build_trae_output_plan(context)?; + push_base_output_files(&mut outputs, &plan.output_files); } if enabled_plugins.warp { - if let Ok(plan) = crate::domain::output_plans::warp_output_plan::build_warp_output_plan(context) - { - push_base_output_files(&mut outputs, &plan.output_files); - } + let plan = crate::domain::output_plans::warp_output_plan::build_warp_output_plan(context)?; + push_base_output_files(&mut outputs, &plan.output_files); } if enabled_plugins.windsurf { - if let Ok(plan) = - crate::domain::output_plans::windsurf_output_plan::build_windsurf_output_plan(context) - { - push_base_output_files(&mut outputs, &plan.output_files); - } + let plan = crate::domain::output_plans::windsurf_output_plan::build_windsurf_output_plan(context)?; + push_base_output_files(&mut outputs, &plan.output_files); } Ok(outputs) @@ -561,7 +189,7 @@ fn push_base_output_files( PlannedOutputFile { path: file.path.clone(), content: file.content.clone(), - encoding: None, + encoding: file.encoding.clone(), }, ); } @@ -583,27 +211,12 @@ fn push_droid_output_files( } } -fn count_missing_directories(dir: &Path) -> usize { - let mut missing = Vec::new(); - let mut current = Some(dir); - - while let Some(path) = current { - if path.exists() { - break; - } - missing.push(path.to_path_buf()); - current = path.parent(); - } - - missing.len() -} - #[cfg(test)] mod tests { use super::*; use tempfile::TempDir; - fn with_home_dir(home_dir: &Path, callback: impl FnOnce() -> T) -> T { + fn with_home_dir(home_dir: &std::path::Path, callback: impl FnOnce() -> T) -> T { let _guard = match crate::domain::TEST_ENV_LOCK.lock() { Ok(g) => g, Err(error) => error.into_inner(), @@ -628,7 +241,7 @@ mod tests { result } - fn create_test_config(home_dir: &Path, workspace_dir: &Path) -> std::io::Result<()> { + fn create_test_config(home_dir: &std::path::Path, workspace_dir: &std::path::Path) -> std::io::Result<()> { let config_content = json!({ "workspaceDir": workspace_dir.to_string_lossy() }); @@ -725,14 +338,14 @@ mod tests { fn dry_run_count_missing_directories_works() { let temp_dir = TempDir::new().unwrap(); let nested = temp_dir.path().join("a").join("b").join("c"); - let count = count_missing_directories(&nested); + let count = crate::services::common::count_missing_directories(&nested); assert_eq!(count, 3); } #[test] fn dry_run_count_missing_directories_returns_zero_for_existing() { let temp_dir = TempDir::new().unwrap(); - let count = count_missing_directories(temp_dir.path()); + let count = crate::services::common::count_missing_directories(temp_dir.path()); assert_eq!(count, 0); } } diff --git a/sdk/src/services/install_service.rs b/sdk/src/services/install_service.rs index 4532777b..7b951233 100644 --- a/sdk/src/services/install_service.rs +++ b/sdk/src/services/install_service.rs @@ -1,141 +1,22 @@ use std::collections::BTreeMap; use std::fs; -use std::path::{Path, PathBuf}; +use std::path::Path; use base64::Engine; -use serde::Deserialize; -use serde::de::DeserializeOwned; use serde_json::{Value, json}; -use crate::context::OutputContext; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPlansDto}; -use crate::domain::config::{self, ConfigLoader, PluginsConfig, UserConfigFile}; use crate::domain::output_plans::droid_output_plan::DroidOutputPlanDto; -use crate::domain::plugin_shared::{ - AIAgentIgnoreConfigFile, FastCommandPrompt, GlobalMemoryPrompt, ProjectIDEConfigFile, - ReadmePrompt, RulePrompt, SkillPrompt, SubAgentPrompt, Workspace, -}; use crate::infra::desk_paths; +use crate::infra::logger::{Logger, create_logger}; use crate::policy::path_blocking; -use crate::services::command_diagnostics::build_workspace_mismatch_warning; +use crate::services::command_diagnostics_service::build_workspace_mismatch_warning; +use crate::services::common::{ + DefaultPluginKind, EnabledPlugins, collect_context, load_config, resolve_cwd, + resolve_workspace_dir, +}; use crate::{CliError, MemorySyncCommandOptions, MemorySyncCommandResult}; -const PLUGIN_AGENTS: &str = "AgentsOutputAdaptor"; -const PLUGIN_GIT: &str = "GitExcludeOutputAdaptor"; -const PLUGIN_JETBRAINS_CODE_STYLE: &str = "JetBrainsIDECodeStyleConfigOutputAdaptor"; -const PLUGIN_VSCODE: &str = "VisualStudioCodeIDEConfigOutputAdaptor"; -const PLUGIN_ZED: &str = "ZedIDEConfigOutputAdaptor"; -const PLUGIN_README: &str = "ReadmeMdConfigFileOutputAdaptor"; -const PLUGIN_CLAUDE: &str = "ClaudeCodeCLIOutputAdaptor"; -const PLUGIN_CODEX: &str = "CodexCLIOutputAdaptor"; -const PLUGIN_CURSOR: &str = "CursorOutputAdaptor"; -const PLUGIN_DROID: &str = "DroidCLIOutputAdaptor"; -const PLUGIN_GEMINI: &str = "GeminiCLIOutputAdaptor"; -const PLUGIN_JETBRAINS: &str = "JetBrainsAIAssistantCodexOutputAdaptor"; -const PLUGIN_KIRO: &str = "KiroCLIOutputAdaptor"; -const PLUGIN_OPENCODE: &str = "OpencodeCLIOutputAdaptor"; -const PLUGIN_QODER: &str = "QoderIDEPluginOutputAdaptor"; -const PLUGIN_TRAE: &str = "TraeOutputAdaptor"; -const PLUGIN_WARP: &str = "WarpIDEOutputAdaptor"; -const PLUGIN_WINDSURF: &str = "WindsurfOutputAdaptor"; - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -struct WorkspaceEnvelope { - workspace: Workspace, -} - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "camelCase")] -struct GlobalMemoryEnvelope { - #[serde(default)] - global_memory: Option, -} - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "camelCase")] -struct CommandsEnvelope { - #[serde(default)] - commands: Vec, -} - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "camelCase")] -struct SubAgentsEnvelope { - #[serde(default)] - sub_agents: Vec, -} - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "camelCase")] -struct SkillsEnvelope { - #[serde(default)] - skills: Vec, -} - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "camelCase")] -struct RulesEnvelope { - #[serde(default)] - rules: Vec, -} - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "camelCase")] -struct ReadmeEnvelope { - #[serde(default)] - readme_prompts: Vec, -} - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "camelCase")] -struct GitIgnoreEnvelope { - #[serde(default)] - global_git_ignore: Option, -} - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "camelCase")] -struct GitExcludeEnvelope { - #[serde(default)] - shadow_git_exclude: Option, -} - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "camelCase")] -struct SharedIgnoreEnvelope { - #[serde(default)] - ai_agent_ignore_config_files: Option>, -} - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "camelCase")] -struct VSCodeEnvelope { - #[serde(default)] - vscode_config_files: Option>, -} - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "camelCase")] -struct ZedEnvelope { - #[serde(default)] - zed_config_files: Option>, -} - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "camelCase")] -struct JetBrainsEnvelope { - #[serde(default)] - jetbrains_config_files: Option>, -} - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "camelCase")] -struct EditorConfigEnvelope { - #[serde(default)] - editor_config_files: Option>, -} - #[derive(Debug, Clone)] struct PlannedOutputFile { path: String, @@ -143,128 +24,74 @@ struct PlannedOutputFile { encoding: Option, } -#[derive(Debug, Clone, Copy, Default)] -struct EnabledPlugins { - agents_md: bool, - claude_code: bool, - codex: bool, - cursor: bool, - droid: bool, - gemini: bool, - git: bool, - jetbrains: bool, - jetbrains_code_style: bool, - kiro: bool, - opencode: bool, - qoder: bool, - readme: bool, - trae: bool, - trae_cn: bool, - vscode: bool, - warp: bool, - windsurf: bool, - zed: bool, -} - -impl EnabledPlugins { - fn from_config(config: Option<&PluginsConfig>) -> Self { - Self { - agents_md: config.and_then(|value| value.agents_md).unwrap_or(true), - claude_code: config.and_then(|value| value.claude_code).unwrap_or(true), - codex: config.and_then(|value| value.codex).unwrap_or(false), - cursor: config.and_then(|value| value.cursor).unwrap_or(false), - droid: config.and_then(|value| value.droid).unwrap_or(false), - gemini: config.and_then(|value| value.gemini).unwrap_or(false), - git: config.and_then(|value| value.git).unwrap_or(true), - jetbrains: config.and_then(|value| value.jetbrains).unwrap_or(false), - jetbrains_code_style: config - .and_then(|value| value.jetbrains_code_style) - .unwrap_or(false), - kiro: config.and_then(|value| value.kiro).unwrap_or(false), - opencode: config.and_then(|value| value.opencode).unwrap_or(true), - qoder: config.and_then(|value| value.qoder).unwrap_or(false), - readme: config.and_then(|value| value.readme).unwrap_or(true), - trae: config.and_then(|value| value.trae).unwrap_or(false), - trae_cn: config.and_then(|value| value.trae_cn).unwrap_or(false), - vscode: config.and_then(|value| value.vscode).unwrap_or(false), - warp: config.and_then(|value| value.warp).unwrap_or(false), - windsurf: config.and_then(|value| value.windsurf).unwrap_or(false), - zed: config.and_then(|value| value.zed).unwrap_or(false), - } - } - - fn is_enabled(self, plugin_name: &str) -> bool { - match plugin_name { - PLUGIN_AGENTS => self.agents_md, - PLUGIN_GIT => self.git, - PLUGIN_JETBRAINS_CODE_STYLE => self.jetbrains_code_style, - PLUGIN_VSCODE => self.vscode, - PLUGIN_ZED => self.zed, - PLUGIN_README => self.readme, - PLUGIN_CLAUDE => self.claude_code, - PLUGIN_CODEX => self.codex, - PLUGIN_CURSOR => self.cursor, - PLUGIN_DROID => self.droid, - PLUGIN_GEMINI => self.gemini, - PLUGIN_JETBRAINS => self.jetbrains, - PLUGIN_KIRO => self.kiro, - PLUGIN_OPENCODE => self.opencode, - PLUGIN_QODER => self.qoder, - PLUGIN_TRAE => self.trae || self.trae_cn, - PLUGIN_WARP => self.warp, - PLUGIN_WINDSURF => self.windsurf, - _ => false, - } - } - - fn registered_output_plugins(self) -> Vec { - let mut plugins = Vec::new(); - for plugin_name in [ - PLUGIN_AGENTS, - PLUGIN_GIT, - PLUGIN_JETBRAINS_CODE_STYLE, - PLUGIN_VSCODE, - PLUGIN_ZED, - PLUGIN_README, - PLUGIN_CLAUDE, - PLUGIN_CODEX, - PLUGIN_CURSOR, - PLUGIN_DROID, - PLUGIN_GEMINI, - PLUGIN_JETBRAINS, - PLUGIN_KIRO, - PLUGIN_OPENCODE, - PLUGIN_QODER, - PLUGIN_TRAE, - PLUGIN_WARP, - PLUGIN_WINDSURF, - ] { - if self.is_enabled(plugin_name) { - plugins.push(plugin_name.to_string()); - } - } - plugins - } -} - pub(crate) fn install( options: MemorySyncCommandOptions, ) -> Result { + let logger = create_logger("install", options.log_level.as_deref().and_then(|s| crate::infra::logger::LogLevel::from_str_loose(s))); + let _span = logger.span("command.install").enter(); + + logger.info("Install started", Some(json!({ + "cwd": options.cwd.as_ref(), + }))); + let cwd = resolve_cwd(options.cwd.as_deref())?; + + let config_span = logger.span("config.load").enter(); let config_result = load_config(&cwd, options.load_user_config)?; + config_span.exit(); + let workspace_dir = resolve_workspace_dir(&cwd, &config_result.config)?; let mut warnings = build_workspace_mismatch_warning(&cwd, &workspace_dir, &config_result) .into_iter() .collect::>(); let workspace_dir_str = workspace_dir.to_string_lossy().into_owned(); - let global_scope = build_global_scope(&config_result.config); - let enabled_plugins = EnabledPlugins::from_config(config_result.config.plugins.as_ref()); - let context = collect_context(&workspace_dir_str, global_scope.as_ref(), enabled_plugins)?; - let planned_outputs = build_output_files(&context, enabled_plugins)?; - let execution = write_output_files(&planned_outputs)?; + logger.info("Config loaded", Some(json!({ + "workspaceDir": &workspace_dir_str, + "configFound": config_result.found, + "configSources": config_result.sources, + }))); + + let global_scope = crate::services::common::build_global_scope(&config_result.config); + let enabled_plugins = EnabledPlugins::from_config(config_result.config.plugins.as_ref(), DefaultPluginKind::Install); + + logger.info("Plugins resolved", Some(json!({ + "enabled": enabled_plugins.registered_plugins(), + }))); + + let context_span = logger.span("context.collect").enter(); + let context = collect_context(&workspace_dir_str, global_scope.as_ref(), &enabled_plugins, &logger)?; + context_span.exit(); + + logger.info("Context collected", Some(json!({ + "globalMemory": context.global_memory.is_some(), + "commands": context.fast_commands.as_ref().map(|v| v.len()), + "skills": context.skills.as_ref().map(|v| v.len()), + "rules": context.rules.as_ref().map(|v| v.len()), + }))); + + let output_span = logger.span("output.build").enter(); + let planned_outputs = build_output_files(&context, enabled_plugins, &logger)?; + output_span.exit(); + + logger.info("Output files built", Some(json!({ + "filesPlanned": planned_outputs.len(), + }))); + + let write_span = logger.span("files.write").enter(); + let execution = write_output_files(&planned_outputs, &logger)?; + write_span.exit(); + warnings.extend(execution.warnings); + logger.info("Install completed", Some(json!({ + "success": execution.errors.is_empty(), + "filesAffected": execution.files_affected, + "dirsAffected": execution.dirs_affected, + "warnings": warnings.len(), + "errors": execution.errors.len(), + }))); + Ok(MemorySyncCommandResult { success: execution.errors.is_empty(), files_affected: execution.files_affected as i32, @@ -284,312 +111,89 @@ pub(crate) fn install( }) } -fn resolve_cwd(cwd: Option<&str>) -> Result { - match cwd { - Some(value) => Ok(config::resolve_workspace_dir(value)), - None => std::env::current_dir().map_err(CliError::IoError), - } -} - -fn load_config( - cwd: &Path, - load_user_config: Option, -) -> Result { - if load_user_config == Some(false) { - return Ok(config::MergedConfigResult { - config: UserConfigFile::default(), - sources: vec![], - found: false, - }); - } - - let result = ConfigLoader::with_defaults() - .try_load(cwd) - .map_err(CliError::ConfigError)?; - - if !result.found { - let config_path = config::get_required_global_config_path() - .unwrap_or_else(|_| config::get_global_config_path()); - return Err(CliError::ConfigError(format!( - "Required config file not found at {}. Please create it before running tnmsc.", - config_path.display() - ))); - } - - Ok(result) -} - -fn resolve_workspace_dir(_cwd: &Path, config: &UserConfigFile) -> Result { - match config.workspace_dir.as_deref() { - Some(dir) => Ok(config::resolve_workspace_dir(dir)), - None => Err(CliError::ConfigError( - "workspaceDir is required but was not configured. Please set workspaceDir in your .tnmsc.json config file.".to_string(), - )), - } -} - -fn build_global_scope(config: &UserConfigFile) -> Option { - let mut scope = serde_json::Map::new(); - - let mut os = serde_json::Map::new(); - os.insert("platform".to_string(), json!(std::env::consts::OS)); - os.insert("arch".to_string(), json!(std::env::consts::ARCH)); - os.insert("name".to_string(), json!(std::env::consts::OS)); - scope.insert("os".to_string(), Value::Object(os)); - - if let Some(profile) = config.profile.as_ref() { - let mut value = serde_json::Map::new(); - if let Some(name) = profile.name.as_ref() { - value.insert("name".to_string(), json!(name)); - } - if let Some(username) = profile.username.as_ref() { - value.insert("username".to_string(), json!(username)); - } - if let Some(gender) = profile.gender.as_ref() { - value.insert("gender".to_string(), json!(gender)); - } - if let Some(birthday) = profile.birthday.as_ref() { - value.insert("birthday".to_string(), json!(birthday)); - } - for (key, extra) in &profile.extra { - value.insert(key.clone(), extra.clone()); - } - if !value.is_empty() { - scope.insert("profile".to_string(), Value::Object(value)); - } - } - - if let Some(code_styles) = config.code_styles.as_ref() { - let mut value = serde_json::Map::new(); - if let Some(indent) = code_styles.indent { - value.insert( - "indent".to_string(), - json!(match indent { - config::CodeStyleIndent::Tab => "tab", - config::CodeStyleIndent::Space => "space", - }), - ); - } - if let Some(tab_size) = code_styles.tab_size { - value.insert("tabSize".to_string(), json!(tab_size)); - } - for (key, extra) in &code_styles.extra { - value.insert(key.clone(), extra.clone()); - } - if !value.is_empty() { - scope.insert("codeStyles".to_string(), Value::Object(value)); - } - } - - let mut tool = serde_json::Map::new(); - tool.insert("name".to_string(), json!("tnmsc")); - tool.insert("version".to_string(), json!(crate::version())); - scope.insert("tool".to_string(), Value::Object(tool)); - - (!scope.is_empty()).then(|| Value::Object(scope)) -} - -fn collect_context( - workspace_dir: &str, - global_scope: Option<&Value>, - enabled_plugins: EnabledPlugins, -) -> Result { - let aindex = collect_json::( - crate::repositories::aindex_resolvers::collect_aindex_resolvers, - json!({ - "workspaceDir": workspace_dir, - }), - )?; - - let project_prompts = collect_json::( - crate::repositories::project_prompt::collect_project_prompt, - json!({ - "workspaceDir": workspace_dir, - "workspace": aindex.workspace, - "globalScope": global_scope, - }), - )?; - - let global_memory = collect_json::( - crate::repositories::global_memory::collect_global_memory, - json!({ - "workspaceDir": workspace_dir, - "globalScope": global_scope, - }), - )?; - - let commands = collect_json::( - crate::repositories::command::collect_command, - json!({ - "workspaceDir": workspace_dir, - "globalScope": global_scope, - }), - )?; - - let sub_agents = collect_json::( - crate::repositories::subagent::collect_subagent, - json!({ - "workspaceDir": workspace_dir, - "globalScope": global_scope, - }), - )?; - - let skills = collect_json::( - crate::repositories::skill::collect_skill, - json!({ - "workspaceDir": workspace_dir, - "globalScope": global_scope, - }), - )?; - - let rules = collect_json::( - crate::repositories::rule::collect_rule, - json!({ - "workspaceDir": workspace_dir, - "globalScope": global_scope, - }), - )?; - let readme = collect_json::( - crate::repositories::readme::collect_readme, - json!({ - "workspaceDir": workspace_dir, - "globalScope": global_scope, - }), - )?; - let gitignore = collect_json::( - crate::repositories::gitignore::collect_gitignore, - json!({ - "workspaceDir": workspace_dir, - }), - )?; - let git_exclude = collect_json::( - crate::repositories::git_exclude::collect_git_exclude, - json!({ - "workspaceDir": workspace_dir, - }), - )?; - let shared_ignore = collect_json::( - crate::repositories::shared_ignore::collect_shared_ignore, - json!({ - "workspaceDir": workspace_dir, - }), - )?; - let vscode = collect_json::( - crate::repositories::vscode_config::collect_vscode_config, - json!({ - "workspaceDir": workspace_dir, - }), - )?; - let zed = collect_json::( - crate::repositories::zed_config::collect_zed_config, - json!({ - "workspaceDir": workspace_dir, - }), - )?; - let jetbrains = collect_json::( - crate::repositories::jetbrains_config::collect_jetbrains_config, - json!({ - "workspaceDir": workspace_dir, - }), - )?; - let editor_config = collect_json::( - crate::repositories::editorconfig::collect_editorconfig, - json!({ - "workspaceDir": workspace_dir, - }), - )?; - - Ok(OutputContext { - workspace: Some(project_prompts.workspace), - vscode_config_files: vscode.vscode_config_files, - zed_config_files: zed.zed_config_files, - jetbrains_config_files: jetbrains.jetbrains_config_files, - editor_config_files: editor_config.editor_config_files, - fast_commands: (!commands.commands.is_empty()).then_some(commands.commands), - sub_agents: (!sub_agents.sub_agents.is_empty()).then_some(sub_agents.sub_agents), - skills: (!skills.skills.is_empty()).then_some(skills.skills), - rules: (!rules.rules.is_empty()).then_some(rules.rules), - global_memory: global_memory.global_memory, - global_git_ignore: gitignore.global_git_ignore, - shadow_git_exclude: git_exclude.shadow_git_exclude, - shadow_source_project_dir: None, - readme_prompts: (!readme.readme_prompts.is_empty()).then_some(readme.readme_prompts), - ai_agent_ignore_config_files: shared_ignore.ai_agent_ignore_config_files, - registered_output_plugins: Some(enabled_plugins.registered_output_plugins()), - }) -} - -fn collect_json( - collector: impl Fn(&str) -> Result, - input: Value, -) -> Result -where - T: DeserializeOwned, -{ - let raw = collector(&input.to_string())?; - serde_json::from_str(&raw).map_err(CliError::SerializationError) -} - fn build_output_files( - context: &OutputContext, + context: &crate::context::OutputContext, enabled_plugins: EnabledPlugins, + logger: &Logger, ) -> Result, CliError> { let mut outputs = BTreeMap::new(); + let base_span = logger.span("output.base_plans").enter(); let base_plans = crate::domain::base_output_plans::build_base_output_plans(context)?; push_base_plans(&mut outputs, &base_plans, enabled_plugins); + base_span.exit(); if enabled_plugins.claude_code { - let plan = - crate::domain::output_plans::claude_code_output_plan::build_claude_code_output_plan(context)?; + let plugin_span = logger.span("output.claude_code").enter(); + let plan = crate::domain::output_plans::claude_code_output_plan::build_claude_code_output_plan(context)?; push_base_output_files(&mut outputs, &plan.output_files); + plugin_span.exit(); } if enabled_plugins.codex { + let plugin_span = logger.span("output.codex").enter(); let plan = crate::domain::output_plans::codex_output_plan::build_codex_output_plan(context)?; push_base_output_files(&mut outputs, &plan.output_files); + plugin_span.exit(); } if enabled_plugins.cursor { + let plugin_span = logger.span("output.cursor").enter(); let plan = crate::domain::output_plans::cursor_output_plan::build_cursor_output_plan(context)?; push_base_output_files(&mut outputs, &plan.output_files); + plugin_span.exit(); } if enabled_plugins.droid { + let plugin_span = logger.span("output.droid").enter(); let plan = crate::domain::output_plans::droid_output_plan::build_droid_output_plan(context)?; push_droid_output_files(&mut outputs, &plan); + plugin_span.exit(); } if enabled_plugins.gemini { + let plugin_span = logger.span("output.gemini").enter(); let plan = crate::domain::output_plans::gemini_output_plan::build_gemini_output_plan(context)?; push_base_output_files(&mut outputs, &plan.output_files); + plugin_span.exit(); } if enabled_plugins.jetbrains { - let plan = - crate::domain::output_plans::jetbrains_ai_assistant_codex_output_plan::build_jetbrains_ai_assistant_codex_output_plan(context)?; + let plugin_span = logger.span("output.jetbrains").enter(); + let plan = crate::domain::output_plans::jetbrains_ai_assistant_codex_output_plan::build_jetbrains_ai_assistant_codex_output_plan(context)?; push_base_output_files(&mut outputs, &plan.output_files); + plugin_span.exit(); } if enabled_plugins.kiro { + let plugin_span = logger.span("output.kiro").enter(); let plan = crate::domain::output_plans::kiro_output_plan::build_kiro_output_plan(context)?; push_base_output_files(&mut outputs, &plan.output_files); + plugin_span.exit(); } if enabled_plugins.opencode { - let plan = - crate::domain::output_plans::opencode_output_plan::build_opencode_output_plan(context)?; + let plugin_span = logger.span("output.opencode").enter(); + let plan = crate::domain::output_plans::opencode_output_plan::build_opencode_output_plan(context)?; push_base_output_files(&mut outputs, &plan.output_files); + plugin_span.exit(); } if enabled_plugins.qoder { + let plugin_span = logger.span("output.qoder").enter(); let plan = crate::domain::output_plans::qoder_output_plan::build_qoder_output_plan(context)?; push_base_output_files(&mut outputs, &plan.output_files); + plugin_span.exit(); } if enabled_plugins.trae || enabled_plugins.trae_cn { + let plugin_span = logger.span("output.trae").enter(); let plan = crate::domain::output_plans::trae_output_plan::build_trae_output_plan(context)?; push_base_output_files(&mut outputs, &plan.output_files); + plugin_span.exit(); } if enabled_plugins.warp { + let plugin_span = logger.span("output.warp").enter(); let plan = crate::domain::output_plans::warp_output_plan::build_warp_output_plan(context)?; push_base_output_files(&mut outputs, &plan.output_files); + plugin_span.exit(); } if enabled_plugins.windsurf { - let plan = - crate::domain::output_plans::windsurf_output_plan::build_windsurf_output_plan(context)?; + let plugin_span = logger.span("output.windsurf").enter(); + let plan = crate::domain::output_plans::windsurf_output_plan::build_windsurf_output_plan(context)?; push_base_output_files(&mut outputs, &plan.output_files); + plugin_span.exit(); } Ok(outputs) @@ -648,6 +252,7 @@ struct InstallExecutionResult { fn write_output_files( outputs: &BTreeMap, + logger: &Logger, ) -> Result { let mut files_affected = 0usize; let mut dirs_affected = 0usize; @@ -683,6 +288,7 @@ fn write_output_files( let existing = fs::read(path).ok(); if existing.as_deref() == Some(bytes.as_slice()) { + logger.debug(format!("file.skipped: {}", file.path), Some(json!({ "reason": "unchanged" }))); continue; } @@ -694,6 +300,7 @@ fn write_output_files( continue; } + logger.info(format!("file.written: {}", file.path), None); files_affected += 1; } @@ -728,7 +335,7 @@ fn prepare_target_path(path: &Path, warnings: &mut Vec) -> Result) -> Result usize { - let mut missing = Vec::new(); - let mut current = Some(dir); - - while let Some(path) = current { - if path.exists() { - break; - } - missing.push(path.to_path_buf()); - current = path.parent(); - } - - missing.len() -} - #[cfg(test)] mod tests { use super::*; + use crate::domain::config::UserConfigFile; use std::path::PathBuf; #[test] diff --git a/sdk/src/services/mod.rs b/sdk/src/services/mod.rs index 3afe57e0..4a09122a 100644 --- a/sdk/src/services/mod.rs +++ b/sdk/src/services/mod.rs @@ -1,5 +1,6 @@ pub mod clean_service; -pub mod command_diagnostics; +pub mod command_diagnostics_service; +pub mod common; pub mod dry_run_service; pub mod install_service; -pub mod prompts; +pub mod prompt_service; diff --git a/sdk/src/services/prompts.rs b/sdk/src/services/prompt_service.rs similarity index 97% rename from sdk/src/services/prompts.rs rename to sdk/src/services/prompt_service.rs index 7059696a..a7415091 100644 --- a/sdk/src/services/prompts.rs +++ b/sdk/src/services/prompt_service.rs @@ -1061,6 +1061,9 @@ fn build_prompt_definition_from_id( // --------------------------------------------------------------------------- pub fn list_prompts(options: &ListPromptsOptions) -> Result, String> { + let logger = crate::infra::logger::create_logger("prompt_service", None); + let _span = logger.span("prompt.list").enter(); + let env = resolve_prompt_environment(&options.base)?; let items: Vec = collect_discovered_prompt_ids(&env) .into_iter() @@ -1073,6 +1076,8 @@ pub fn list_prompts(options: &ListPromptsOptions) -> Result Result, String> { + let logger = crate::infra::logger::create_logger("prompt_service", None); + let _span = logger.span("prompt.get").enter(); + let env = resolve_prompt_environment(options)?; let def = build_prompt_definition_from_id(prompt_id, &env)?; - Ok(hydrate_prompt(&def, true)) + let result = hydrate_prompt(&def, true); + + logger.info(format!("Get prompt: {}", prompt_id), Some(serde_json::json!({ "found": result.is_some() }))); + Ok(result) } pub fn upsert_prompt_source(input: &UpsertPromptSourceInput) -> Result { + let logger = crate::infra::logger::create_logger("prompt_service", None); + let _span = logger.span("prompt.upsert").enter(); + let env = resolve_prompt_environment(&input.base)?; let definition = build_prompt_definition_from_id(&input.prompt_id, &env)?; let locale = input.locale.unwrap_or(PromptSourceLocale::Zh); @@ -1098,10 +1112,15 @@ pub fn upsert_prompt_source(input: &UpsertPromptSourceInput) -> Result Result { + let logger = crate::infra::logger::create_logger("prompt_service", None); + let _span = logger.span("prompt.write_artifacts").enter(); + if input.en_content.is_none() { return Err("writePromptArtifacts requires enContent".to_string()); } @@ -1113,5 +1132,7 @@ pub fn write_prompt_artifacts(input: &WritePromptArtifactsInput) -> Result Date: Sat, 25 Apr 2026 09:52:37 +0800 Subject: [PATCH 03/45] chore: release 2026.10425.10151 --- Cargo.lock | 18 +++++++++--------- Cargo.toml | 2 +- cli/npm/darwin-arm64/package.json | 2 +- cli/npm/darwin-x64/package.json | 2 +- cli/npm/linux-arm64-gnu/package.json | 2 +- cli/npm/linux-x64-gnu/package.json | 2 +- cli/npm/win32-x64-msvc/package.json | 2 +- cli/package.json | 12 ++++++------ doc/package.json | 2 +- gui/package.json | 2 +- gui/src-tauri/Cargo.toml | 2 +- gui/src-tauri/tauri.conf.json | 2 +- mcp/npm/darwin-arm64/package.json | 2 +- mcp/npm/darwin-x64/package.json | 2 +- mcp/npm/linux-arm64-gnu/package.json | 2 +- mcp/npm/linux-x64-gnu/package.json | 2 +- mcp/npm/win32-x64-msvc/package.json | 2 +- mcp/package.json | 12 ++++++------ package.json | 2 +- 19 files changed, 37 insertions(+), 37 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 38861b7d..01e6c251 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3062,7 +3062,7 @@ dependencies = [ [[package]] name = "memory-sync" -version = "2026.10424.111" +version = "2026.10425.10151" dependencies = [ "tnmsc", ] @@ -6223,7 +6223,7 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tnmsc" -version = "2026.10424.111" +version = "2026.10425.10151" dependencies = [ "clap", "serde_json", @@ -6232,7 +6232,7 @@ dependencies = [ [[package]] name = "tnmsc-integrate-tests" -version = "2026.10424.111" +version = "2026.10425.10151" dependencies = [ "flate2", "serde_json", @@ -6242,7 +6242,7 @@ dependencies = [ [[package]] name = "tnmsc-local-tests" -version = "2026.10424.111" +version = "2026.10425.10151" dependencies = [ "dirs", "json5", @@ -6251,7 +6251,7 @@ dependencies = [ [[package]] name = "tnmsd" -version = "2026.10424.111" +version = "2026.10425.10151" dependencies = [ "base64 0.22.1", "chrono", @@ -6279,7 +6279,7 @@ dependencies = [ [[package]] name = "tnmsg" -version = "2026.10424.111" +version = "2026.10425.10151" dependencies = [ "dirs", "proptest", @@ -6294,7 +6294,7 @@ dependencies = [ [[package]] name = "tnmsm" -version = "2026.10424.111" +version = "2026.10425.10151" dependencies = [ "clap", "serde_json", @@ -6303,7 +6303,7 @@ dependencies = [ [[package]] name = "tnmsm-integrate-tests" -version = "2026.10424.111" +version = "2026.10425.10151" dependencies = [ "serde_json", "testcontainers", @@ -7819,7 +7819,7 @@ dependencies = [ [[package]] name = "xtask" -version = "2026.10424.111" +version = "2026.10425.10151" dependencies = [ "clap", "serde", diff --git a/Cargo.toml b/Cargo.toml index 53b112c6..dddfa3e8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -29,7 +29,7 @@ members = [ ] [workspace.package] -version = "2026.10424.111" +version = "2026.10425.10151" edition = "2024" rust-version = "1.88" license = "AGPL-3.0-only" diff --git a/cli/npm/darwin-arm64/package.json b/cli/npm/darwin-arm64/package.json index 8f602023..6258f449 100644 --- a/cli/npm/darwin-arm64/package.json +++ b/cli/npm/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-darwin-arm64", - "version": "2026.10424.111", + "version": "2026.10425.10151", "description": "tnmsc native binary for macOS arm64", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/cli/npm/darwin-x64/package.json b/cli/npm/darwin-x64/package.json index b69602e4..edb63393 100644 --- a/cli/npm/darwin-x64/package.json +++ b/cli/npm/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-darwin-x64", - "version": "2026.10424.111", + "version": "2026.10425.10151", "description": "tnmsc native binary for macOS x64", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/cli/npm/linux-arm64-gnu/package.json b/cli/npm/linux-arm64-gnu/package.json index 9165ce52..a3d0c325 100644 --- a/cli/npm/linux-arm64-gnu/package.json +++ b/cli/npm/linux-arm64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-linux-arm64-gnu", - "version": "2026.10424.111", + "version": "2026.10425.10151", "description": "tnmsc native binary for Linux arm64 (glibc)", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/cli/npm/linux-x64-gnu/package.json b/cli/npm/linux-x64-gnu/package.json index 71e0c89f..243e6350 100644 --- a/cli/npm/linux-x64-gnu/package.json +++ b/cli/npm/linux-x64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-linux-x64-gnu", - "version": "2026.10424.111", + "version": "2026.10425.10151", "description": "tnmsc native binary for Linux x64 (glibc)", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/cli/npm/win32-x64-msvc/package.json b/cli/npm/win32-x64-msvc/package.json index 32be222e..1e29886d 100644 --- a/cli/npm/win32-x64-msvc/package.json +++ b/cli/npm/win32-x64-msvc/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-win32-x64-msvc", - "version": "2026.10424.111", + "version": "2026.10425.10151", "description": "tnmsc native binary for Windows x64", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/cli/package.json b/cli/package.json index e2fd1921..6ba5505c 100644 --- a/cli/package.json +++ b/cli/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli", - "version": "2026.10424.111", + "version": "2026.10425.10151", "description": "TrueNine Memory Synchronization CLI metadata package", "author": "TrueNine", "license": "AGPL-3.0-only", @@ -34,10 +34,10 @@ "test": "cargo test --manifest-path Cargo.toml" }, "optionalDependencies": { - "@truenine/memory-sync-cli-darwin-arm64": "2026.10424.111", - "@truenine/memory-sync-cli-darwin-x64": "2026.10424.111", - "@truenine/memory-sync-cli-linux-arm64-gnu": "2026.10424.111", - "@truenine/memory-sync-cli-linux-x64-gnu": "2026.10424.111", - "@truenine/memory-sync-cli-win32-x64-msvc": "2026.10424.111" + "@truenine/memory-sync-cli-darwin-arm64": "2026.10425.10151", + "@truenine/memory-sync-cli-darwin-x64": "2026.10425.10151", + "@truenine/memory-sync-cli-linux-arm64-gnu": "2026.10425.10151", + "@truenine/memory-sync-cli-linux-x64-gnu": "2026.10425.10151", + "@truenine/memory-sync-cli-win32-x64-msvc": "2026.10425.10151" } } diff --git a/doc/package.json b/doc/package.json index 269400e5..75056af1 100644 --- a/doc/package.json +++ b/doc/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-docs", - "version": "2026.10424.111", + "version": "2026.10425.10151", "private": true, "packageManager": "pnpm@10.33.0", "description": "Chinese-first manifesto-led documentation site for @truenine/memory-sync.", diff --git a/gui/package.json b/gui/package.json index f1bf5139..b5a5cabb 100644 --- a/gui/package.json +++ b/gui/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-gui", - "version": "2026.10424.111", + "version": "2026.10425.10151", "private": true, "engines": { "node": ">= 22" diff --git a/gui/src-tauri/Cargo.toml b/gui/src-tauri/Cargo.toml index f01a81b9..06405893 100644 --- a/gui/src-tauri/Cargo.toml +++ b/gui/src-tauri/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "tnmsg" -version = "2026.10424.111" +version = "2026.10425.10151" description = "Memory Sync desktop GUI application" authors.workspace = true edition.workspace = true diff --git a/gui/src-tauri/tauri.conf.json b/gui/src-tauri/tauri.conf.json index 28f8c704..7e6f1c67 100644 --- a/gui/src-tauri/tauri.conf.json +++ b/gui/src-tauri/tauri.conf.json @@ -1,6 +1,6 @@ { "$schema": "https://schema.tauri.app/config/2", - "version": "2026.10424.111", + "version": "2026.10425.10151", "productName": "Memory Sync", "identifier": "org.truenine.memory-sync", "build": { diff --git a/mcp/npm/darwin-arm64/package.json b/mcp/npm/darwin-arm64/package.json index 90f0ef87..bd246f0e 100644 --- a/mcp/npm/darwin-arm64/package.json +++ b/mcp/npm/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-mcp-darwin-arm64", - "version": "2026.10424.111", + "version": "2026.10425.10151", "description": "tnmsm native binary for macOS arm64", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/mcp/npm/darwin-x64/package.json b/mcp/npm/darwin-x64/package.json index aac18393..cafdbdc6 100644 --- a/mcp/npm/darwin-x64/package.json +++ b/mcp/npm/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-mcp-darwin-x64", - "version": "2026.10424.111", + "version": "2026.10425.10151", "description": "tnmsm native binary for macOS x64", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/mcp/npm/linux-arm64-gnu/package.json b/mcp/npm/linux-arm64-gnu/package.json index e95b95d4..3bc21c57 100644 --- a/mcp/npm/linux-arm64-gnu/package.json +++ b/mcp/npm/linux-arm64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-mcp-linux-arm64-gnu", - "version": "2026.10424.111", + "version": "2026.10425.10151", "description": "tnmsm native binary for Linux arm64 (glibc)", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/mcp/npm/linux-x64-gnu/package.json b/mcp/npm/linux-x64-gnu/package.json index f5cd7cd0..3545938a 100644 --- a/mcp/npm/linux-x64-gnu/package.json +++ b/mcp/npm/linux-x64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-mcp-linux-x64-gnu", - "version": "2026.10424.111", + "version": "2026.10425.10151", "description": "tnmsm native binary for Linux x64 (glibc)", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/mcp/npm/win32-x64-msvc/package.json b/mcp/npm/win32-x64-msvc/package.json index 2c450d08..aa716f6d 100644 --- a/mcp/npm/win32-x64-msvc/package.json +++ b/mcp/npm/win32-x64-msvc/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-mcp-win32-x64-msvc", - "version": "2026.10424.111", + "version": "2026.10425.10151", "description": "tnmsm native binary for Windows x64", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/mcp/package.json b/mcp/package.json index 1618821a..2469895f 100644 --- a/mcp/package.json +++ b/mcp/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-mcp", - "version": "2026.10424.111", + "version": "2026.10425.10151", "description": "TrueNine Memory Sync MCP metadata package", "author": "TrueNine", "license": "AGPL-3.0-only", @@ -32,10 +32,10 @@ "test": "cargo test --manifest-path Cargo.toml" }, "optionalDependencies": { - "@truenine/memory-sync-mcp-darwin-arm64": "2026.10424.111", - "@truenine/memory-sync-mcp-darwin-x64": "2026.10424.111", - "@truenine/memory-sync-mcp-linux-arm64-gnu": "2026.10424.111", - "@truenine/memory-sync-mcp-linux-x64-gnu": "2026.10424.111", - "@truenine/memory-sync-mcp-win32-x64-msvc": "2026.10424.111" + "@truenine/memory-sync-mcp-darwin-arm64": "2026.10425.10151", + "@truenine/memory-sync-mcp-darwin-x64": "2026.10425.10151", + "@truenine/memory-sync-mcp-linux-arm64-gnu": "2026.10425.10151", + "@truenine/memory-sync-mcp-linux-x64-gnu": "2026.10425.10151", + "@truenine/memory-sync-mcp-win32-x64-msvc": "2026.10425.10151" } } diff --git a/package.json b/package.json index 322fecbb..96f83d20 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync", - "version": "2026.10424.111", + "version": "2026.10425.10151", "description": "Cross-AI-tool prompt synchronisation toolkit (CLI + Tauri desktop GUI) — one ruleset, multi-target adaptation. Monorepo powered by pnpm + Turbo.", "license": "AGPL-3.0-only", "keywords": [ From 6994354e4c2e0b2df98dd6355b585d758255a0e6 Mon Sep 17 00:00:00 2001 From: TrueNine Date: Sat, 25 Apr 2026 10:09:23 +0800 Subject: [PATCH 04/45] fix: missing std::fs import in MCP packaging smoke test, cargo fmt --- .../tests/logging_error_feedback.rs | 9 +- .../tests/logging_install_observability.rs | 4 +- cli/local-tests/tests/logging_levels.rs | 4 +- cli/src/cli.rs | 2 - mcp/integrate-tests/tests/packaging_smoke.rs | 2 + mcp/src/main.rs | 13 +- .../domain/output_plans/codex_output_plan.rs | 4 +- .../domain/output_plans/cursor_output_plan.rs | 4 +- .../domain/output_plans/droid_output_plan.rs | 4 +- .../domain/output_plans/gemini_output_plan.rs | 4 +- .../generic_skills_output_plan.rs | 4 +- .../domain/output_plans/kiro_output_plan.rs | 4 +- .../output_plans/opencode_output_plan.rs | 4 +- .../domain/output_plans/qoder_output_plan.rs | 4 +- .../domain/output_plans/trae_output_plan.rs | 4 +- .../domain/output_plans/warp_output_plan.rs | 4 +- .../output_plans/windsurf_output_plan.rs | 4 +- sdk/src/infra/git_fs.rs | 6 +- sdk/src/infra/logger/core.rs | 18 +- sdk/src/infra/logger/diagnostic.rs | 14 +- sdk/src/infra/logger/formatter.rs | 34 ++- sdk/src/infra/logger/mod.rs | 8 +- sdk/src/infra/logger/sink.rs | 17 +- sdk/src/services/clean_service.rs | 281 +++++++++++++----- sdk/src/services/common.rs | 3 +- sdk/src/services/dry_run_service.rs | 82 +++-- sdk/src/services/install_service.rs | 108 ++++--- sdk/src/services/prompt_service.rs | 10 +- 28 files changed, 461 insertions(+), 198 deletions(-) diff --git a/cli/local-tests/tests/logging_error_feedback.rs b/cli/local-tests/tests/logging_error_feedback.rs index 952bd969..160bd7ee 100644 --- a/cli/local-tests/tests/logging_error_feedback.rs +++ b/cli/local-tests/tests/logging_error_feedback.rs @@ -30,21 +30,24 @@ fn missing_config_outputs_diagnostic_with_fix() { assert!( result.stderr.contains("What happened") || result.stdout.contains("What happened"), "error should contain 'What happened' section. stdout:\n{}\nstderr:\n{}", - result.stdout, result.stderr + result.stdout, + result.stderr ); // 验证有修复建议(嵌入在错误消息中) assert!( result.stderr.contains("Please create it") || result.stdout.contains("Please create it"), "error should contain fix suggestion. stdout:\n{}\nstderr:\n{}", - result.stdout, result.stderr + result.stdout, + result.stderr ); // 验证提及配置文件 assert!( result.stderr.contains(".tnmsc.json") || result.stdout.contains(".tnmsc.json"), "error should mention .tnmsc.json. stdout:\n{}\nstderr:\n{}", - result.stdout, result.stderr + result.stdout, + result.stderr ); } diff --git a/cli/local-tests/tests/logging_install_observability.rs b/cli/local-tests/tests/logging_install_observability.rs index 1993c75f..ac9b2585 100644 --- a/cli/local-tests/tests/logging_install_observability.rs +++ b/cli/local-tests/tests/logging_install_observability.rs @@ -49,7 +49,9 @@ fn install_outputs_key_spans_and_events() { // 验证 collector span assert!( - result.stdout.contains("### collect.aindex_resolvers started"), + result + .stdout + .contains("### collect.aindex_resolvers started"), "install should output 'collect.aindex_resolvers' span. stdout:\n{}", result.stdout ); diff --git a/cli/local-tests/tests/logging_levels.rs b/cli/local-tests/tests/logging_levels.rs index b04ba3dc..fa5fc249 100644 --- a/cli/local-tests/tests/logging_levels.rs +++ b/cli/local-tests/tests/logging_levels.rs @@ -16,7 +16,9 @@ fn trace_level_outputs_span_events() { // Trace 级别应该输出 collector span assert!( - result.stdout.contains("### collect.aindex_resolvers started"), + result + .stdout + .contains("### collect.aindex_resolvers started"), "--trace should output collector spans. stdout:\n{}", result.stdout ); diff --git a/cli/src/cli.rs b/cli/src/cli.rs index 0ab42be5..60dc4179 100644 --- a/cli/src/cli.rs +++ b/cli/src/cli.rs @@ -112,8 +112,6 @@ impl ResolvedLogLevel { Self::Error => "error", } } - - } /// Resolve log level from CLI flags. diff --git a/mcp/integrate-tests/tests/packaging_smoke.rs b/mcp/integrate-tests/tests/packaging_smoke.rs index 4c865f8b..bad8059b 100644 --- a/mcp/integrate-tests/tests/packaging_smoke.rs +++ b/mcp/integrate-tests/tests/packaging_smoke.rs @@ -1,3 +1,5 @@ +use std::fs; + #[cfg(unix)] use std::os::unix::fs::PermissionsExt; diff --git a/mcp/src/main.rs b/mcp/src/main.rs index 017697d5..8cc7d8cc 100644 --- a/mcp/src/main.rs +++ b/mcp/src/main.rs @@ -327,7 +327,7 @@ fn main() -> ExitCode { std::env::var("LOG_LEVEL") .ok() .and_then(|s| tnmsd::infra::logger::LogLevel::from_str_loose(&s)) - .unwrap_or(tnmsd::infra::logger::LogLevel::Info) + .unwrap_or(tnmsd::infra::logger::LogLevel::Info), ); let cli = Cli::parse(); @@ -336,10 +336,13 @@ fn main() -> ExitCode { match resolve_command(&cli) { ResolvedCommand::Serve => { let _span = logger.span("server.serve").enter(); - logger.info("MCP server started", Some(json!({ - "serverName": SERVER_NAME, - "protocolVersion": PROTOCOL_VERSION, - }))); + logger.info( + "MCP server started", + Some(json!({ + "serverName": SERVER_NAME, + "protocolVersion": PROTOCOL_VERSION, + })), + ); run_stdio_server(); ExitCode::SUCCESS } diff --git a/sdk/src/domain/output_plans/codex_output_plan.rs b/sdk/src/domain/output_plans/codex_output_plan.rs index 68eb89a0..3bd072b0 100644 --- a/sdk/src/domain/output_plans/codex_output_plan.rs +++ b/sdk/src/domain/output_plans/codex_output_plan.rs @@ -18,11 +18,11 @@ use std::path::PathBuf; use crate::CliError; -use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPluginPlanDto}; +use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; use crate::domain::config; +use crate::domain::output_context::OutputContext; use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; -use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; const CODEX_PLUGIN_NAME: &str = "CodexCLIOutputAdaptor"; const CODEX_INSTRUCTIONS_FILE: &str = "AGENTS.md"; diff --git a/sdk/src/domain/output_plans/cursor_output_plan.rs b/sdk/src/domain/output_plans/cursor_output_plan.rs index 8edbfb6e..23e0c270 100644 --- a/sdk/src/domain/output_plans/cursor_output_plan.rs +++ b/sdk/src/domain/output_plans/cursor_output_plan.rs @@ -1,10 +1,10 @@ use std::path::PathBuf; use crate::CliError; -use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPluginPlanDto}; -use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; +use crate::domain::output_context::OutputContext; +use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; const CURSOR_PLUGIN_NAME: &str = "CursorOutputAdaptor"; const CURSOR_MEMORY_FILE: &str = ".cursorrules"; diff --git a/sdk/src/domain/output_plans/droid_output_plan.rs b/sdk/src/domain/output_plans/droid_output_plan.rs index c0220f13..d7dafc3b 100644 --- a/sdk/src/domain/output_plans/droid_output_plan.rs +++ b/sdk/src/domain/output_plans/droid_output_plan.rs @@ -5,13 +5,13 @@ use serde::{Deserialize, Serialize}; use serde_json::{Map, Value}; use crate::CliError; -use crate::domain::output_context::OutputContext; +use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; use crate::domain::config; +use crate::domain::output_context::OutputContext; use crate::domain::plugin_shared::{ FastCommandPrompt, Project, RelativePath, RuleScope, SkillPrompt, SkillResourceEncoding, Workspace, }; -use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; const DROID_PLUGIN_NAME: &str = "DroidCLIOutputAdaptor"; const DROID_MEMORY_FILE: &str = "AGENTS.md"; diff --git a/sdk/src/domain/output_plans/gemini_output_plan.rs b/sdk/src/domain/output_plans/gemini_output_plan.rs index a3ad9ea0..ea7b4c69 100644 --- a/sdk/src/domain/output_plans/gemini_output_plan.rs +++ b/sdk/src/domain/output_plans/gemini_output_plan.rs @@ -2,11 +2,11 @@ use std::collections::HashSet; use std::path::PathBuf; use crate::CliError; -use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPluginPlanDto}; +use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; use crate::domain::config; +use crate::domain::output_context::OutputContext; use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; -use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; const GEMINI_PLUGIN_NAME: &str = "GeminiCLIOutputAdaptor"; const GEMINI_MEMORY_FILE: &str = "GEMINI.md"; diff --git a/sdk/src/domain/output_plans/generic_skills_output_plan.rs b/sdk/src/domain/output_plans/generic_skills_output_plan.rs index 7210563a..87626a36 100644 --- a/sdk/src/domain/output_plans/generic_skills_output_plan.rs +++ b/sdk/src/domain/output_plans/generic_skills_output_plan.rs @@ -1,8 +1,8 @@ use crate::CliError; -use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::BaseOutputPluginPlanDto; -use crate::domain::plugin_shared::Workspace; use crate::domain::cleanup::CleanupDeclarationsDto; +use crate::domain::output_context::OutputContext; +use crate::domain::plugin_shared::Workspace; const GENERIC_SKILLS_PLUGIN_NAME: &str = "GenericSkillsOutputAdaptor"; diff --git a/sdk/src/domain/output_plans/kiro_output_plan.rs b/sdk/src/domain/output_plans/kiro_output_plan.rs index 472b5095..3fc88d26 100644 --- a/sdk/src/domain/output_plans/kiro_output_plan.rs +++ b/sdk/src/domain/output_plans/kiro_output_plan.rs @@ -1,10 +1,10 @@ use std::path::PathBuf; use crate::CliError; -use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::BaseOutputPluginPlanDto; -use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; +use crate::domain::output_context::OutputContext; +use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; const KIRO_PLUGIN_NAME: &str = "KiroCLIOutputAdaptor"; const PROJECT_SCOPE: &str = "project"; diff --git a/sdk/src/domain/output_plans/opencode_output_plan.rs b/sdk/src/domain/output_plans/opencode_output_plan.rs index 6bd98f61..11f34092 100644 --- a/sdk/src/domain/output_plans/opencode_output_plan.rs +++ b/sdk/src/domain/output_plans/opencode_output_plan.rs @@ -3,11 +3,11 @@ use std::path::PathBuf; use serde_json::Value; use crate::CliError; -use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPluginPlanDto}; +use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; use crate::domain::config; +use crate::domain::output_context::OutputContext; use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; -use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; const OPENCODE_PLUGIN_NAME: &str = "OpencodeCLIOutputAdaptor"; const OPENCODE_MEMORY_FILE: &str = "AGENTS.md"; diff --git a/sdk/src/domain/output_plans/qoder_output_plan.rs b/sdk/src/domain/output_plans/qoder_output_plan.rs index 81fdb322..56fe563c 100644 --- a/sdk/src/domain/output_plans/qoder_output_plan.rs +++ b/sdk/src/domain/output_plans/qoder_output_plan.rs @@ -1,8 +1,8 @@ use crate::CliError; -use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::BaseOutputPluginPlanDto; -use crate::domain::plugin_shared::Workspace; use crate::domain::cleanup::CleanupDeclarationsDto; +use crate::domain::output_context::OutputContext; +use crate::domain::plugin_shared::Workspace; const QODER_PLUGIN_NAME: &str = "QoderIDEPluginOutputAdaptor"; diff --git a/sdk/src/domain/output_plans/trae_output_plan.rs b/sdk/src/domain/output_plans/trae_output_plan.rs index 6c4c9f42..353958bd 100644 --- a/sdk/src/domain/output_plans/trae_output_plan.rs +++ b/sdk/src/domain/output_plans/trae_output_plan.rs @@ -1,10 +1,10 @@ use std::path::PathBuf; use crate::CliError; -use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPluginPlanDto}; -use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; +use crate::domain::output_context::OutputContext; +use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; const TRAE_PLUGIN_NAME: &str = "TraeOutputAdaptor"; const TRAE_STEERING_FILE: &str = "GLOBAL.md"; diff --git a/sdk/src/domain/output_plans/warp_output_plan.rs b/sdk/src/domain/output_plans/warp_output_plan.rs index 5a604ae9..4a9d9294 100644 --- a/sdk/src/domain/output_plans/warp_output_plan.rs +++ b/sdk/src/domain/output_plans/warp_output_plan.rs @@ -1,10 +1,10 @@ use std::path::PathBuf; use crate::CliError; -use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPluginPlanDto}; -use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; +use crate::domain::output_context::OutputContext; +use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; const WARP_PLUGIN_NAME: &str = "WarpIDEOutputAdaptor"; const WARP_MEMORY_FILE: &str = "WARP.md"; diff --git a/sdk/src/domain/output_plans/windsurf_output_plan.rs b/sdk/src/domain/output_plans/windsurf_output_plan.rs index c97b302c..7eed372f 100644 --- a/sdk/src/domain/output_plans/windsurf_output_plan.rs +++ b/sdk/src/domain/output_plans/windsurf_output_plan.rs @@ -1,10 +1,10 @@ use std::path::PathBuf; use crate::CliError; -use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPluginPlanDto}; -use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; +use crate::domain::output_context::OutputContext; +use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; const WINDSURF_PLUGIN_NAME: &str = "WindsurfOutputAdaptor"; const WINDSURF_MEMORY_FILE: &str = ".windsurfrules"; diff --git a/sdk/src/infra/git_fs.rs b/sdk/src/infra/git_fs.rs index 9197d21b..4a0d31df 100644 --- a/sdk/src/infra/git_fs.rs +++ b/sdk/src/infra/git_fs.rs @@ -101,7 +101,11 @@ mod tests { let result = resolve_git_info_dir(tmp.path()); assert!(result.is_some()); - let result_str = result.as_ref().unwrap().to_string_lossy().replace('\\', "/"); + let result_str = result + .as_ref() + .unwrap() + .to_string_lossy() + .replace('\\', "/"); // On Windows, absolute paths starting with / get a drive letter prefix let result_normalized = result_str .strip_prefix("C:") diff --git a/sdk/src/infra/logger/core.rs b/sdk/src/infra/logger/core.rs index 47191a45..406466f1 100644 --- a/sdk/src/infra/logger/core.rs +++ b/sdk/src/infra/logger/core.rs @@ -4,7 +4,9 @@ use std::time::{Duration, Instant}; use serde::Serialize; use serde_json::Value; -use super::diagnostic::{DiagnosticInput, invalid_record, record_from_input, validate_diagnostic_input}; +use super::diagnostic::{ + DiagnosticInput, invalid_record, record_from_input, validate_diagnostic_input, +}; use super::sink::buffer_diagnostic; // --------------------------------------------------------------------------- @@ -102,7 +104,10 @@ impl SpanGuard { fn new(span: Span) -> Self { // Emit span enter event immediately crate::infra::logger::sink::write_span_enter(&span); - Self { span, exited: false } + Self { + span, + exited: false, + } } pub fn exit(mut self) { @@ -204,9 +209,12 @@ impl Logger { fn log_diagnostic(&self, level: LogLevel, diagnostic: DiagnosticInput) { let record = match validate_diagnostic_input(&diagnostic) { Ok(()) => record_from_input(&self.namespace, level.as_str(), diagnostic), - Err(errors) => { - invalid_record(&self.namespace, level.as_str(), serde_json::to_value(&diagnostic).unwrap_or_default(), &errors) - } + Err(errors) => invalid_record( + &self.namespace, + level.as_str(), + serde_json::to_value(&diagnostic).unwrap_or_default(), + &errors, + ), }; // Buffer diagnostics even if level is Silent diff --git a/sdk/src/infra/logger/diagnostic.rs b/sdk/src/infra/logger/diagnostic.rs index 22223c71..75750b9e 100644 --- a/sdk/src/infra/logger/diagnostic.rs +++ b/sdk/src/infra/logger/diagnostic.rs @@ -50,7 +50,9 @@ pub fn validate_diagnostic_input(input: &DiagnosticInput) -> Result<(), Vec Result<(), Vec DiagnosticRecord { +pub fn record_from_input(namespace: &str, level: &str, input: DiagnosticInput) -> DiagnosticRecord { let mut record = DiagnosticRecord { code: input.code.trim().to_string(), title: input.title.trim().to_string(), diff --git a/sdk/src/infra/logger/formatter.rs b/sdk/src/infra/logger/formatter.rs index 768ba641..191941c6 100644 --- a/sdk/src/infra/logger/formatter.rs +++ b/sdk/src/infra/logger/formatter.rs @@ -5,12 +5,8 @@ use super::core::{Event, LogLevel, Span}; /// Format an event as Markdown. pub fn format_event(event: &Event) -> String { match event.level { - LogLevel::Warn | LogLevel::Error | LogLevel::Fatal => { - format_diagnostic_event(event) - } - _ => { - format_message_event(event) - } + LogLevel::Warn | LogLevel::Error | LogLevel::Fatal => format_diagnostic_event(event), + _ => format_message_event(event), } } @@ -22,7 +18,10 @@ pub fn format_span_enter(span: &Span) -> String { /// Format a span exit event with duration. pub fn format_span_exit(span: &Span) -> String { let duration_ms = span.duration().as_millis(); - format!("### {} completed\n - duration: {}ms", span.name, duration_ms) + format!( + "### {} completed\n - duration: {}ms", + span.name, duration_ms + ) } fn format_message_event(event: &Event) -> String { @@ -52,10 +51,11 @@ fn format_message_event(event: &Event) -> String { fn format_diagnostic_event(event: &Event) -> String { // For diagnostic events, the message contains the serialized DiagnosticRecord - let record: super::diagnostic::DiagnosticRecord = match serde_json::from_value(event.message.clone()) { - Ok(r) => r, - Err(_) => return "### Diagnostic error\n - failed to parse diagnostic record".to_string(), - }; + let record: super::diagnostic::DiagnosticRecord = + match serde_json::from_value(event.message.clone()) { + Ok(r) => r, + Err(_) => return "### Diagnostic error\n - failed to parse diagnostic record".to_string(), + }; let mut lines = vec![format!("### {}", record.title)]; @@ -108,7 +108,10 @@ fn format_diagnostic_event(event: &Event) -> String { lines.join("\n") } -fn extract_message_and_meta(message: &Value, meta: Option<&Value>) -> (Option, Vec) { +fn extract_message_and_meta( + message: &Value, + meta: Option<&Value>, +) -> (Option, Vec) { let (msg, mut lines) = match message { Value::String(s) => (Some(s.clone()), Vec::new()), Value::Object(map) => { @@ -159,7 +162,12 @@ pub(crate) fn value_to_markdown_lines(value: &Value) -> Vec { lines } -pub(crate) fn append_markdown_value(lines: &mut Vec, label: Option<&str>, value: &Value, depth: usize) { +pub(crate) fn append_markdown_value( + lines: &mut Vec, + label: Option<&str>, + value: &Value, + depth: usize, +) { let prefix = " ".repeat(depth); let bullet = format!("{prefix}- "); diff --git a/sdk/src/infra/logger/mod.rs b/sdk/src/infra/logger/mod.rs index 39009be4..6d773bd0 100644 --- a/sdk/src/infra/logger/mod.rs +++ b/sdk/src/infra/logger/mod.rs @@ -10,7 +10,9 @@ pub mod diagnostic; pub mod formatter; pub mod sink; -pub use core::{LogLevel, Logger, Span, SpanGuard, get_global_level, resolve_level, set_global_level}; +pub use core::{ + LogLevel, Logger, Span, SpanGuard, get_global_level, resolve_level, set_global_level, +}; pub use diagnostic::{DiagnosticInput, DiagnosticRecord, validate_diagnostic_input}; pub use sink::{clear_diagnostics, drain_diagnostics, flush}; @@ -232,7 +234,9 @@ mod tests { #[test] fn test_resolve_level_fallback_to_global() { set_global_level(LogLevel::Warn); - unsafe { std::env::remove_var("LOG_LEVEL"); } + unsafe { + std::env::remove_var("LOG_LEVEL"); + } let level = resolve_level(None); assert_eq!(level, LogLevel::Warn); } diff --git a/sdk/src/infra/logger/sink.rs b/sdk/src/infra/logger/sink.rs index 2aab9a9c..5d5c5913 100644 --- a/sdk/src/infra/logger/sink.rs +++ b/sdk/src/infra/logger/sink.rs @@ -1,5 +1,5 @@ use std::io::{self, Write}; -use std::sync::mpsc::{self, Sender, Receiver}; +use std::sync::mpsc::{self, Receiver, Sender}; use std::sync::{LazyLock, Mutex}; use std::thread; @@ -29,7 +29,10 @@ static DIAGNOSTIC_BUFFER: LazyLock>> = // --------------------------------------------------------------------------- pub fn write_event(event: &Event) { - let use_stderr = matches!(event.level, LogLevel::Error | LogLevel::Fatal | LogLevel::Warn); + let use_stderr = matches!( + event.level, + LogLevel::Error | LogLevel::Fatal | LogLevel::Warn + ); let output = formatter::format_event(event); send_output(use_stderr, output); } @@ -65,7 +68,10 @@ pub fn clear_diagnostics() { pub fn flush() { let (ack_tx, ack_rx) = mpsc::channel(); - if OUTPUT_SINK.send(OutputCommand::Flush { ack: ack_tx }).is_ok() { + if OUTPUT_SINK + .send(OutputCommand::Flush { ack: ack_tx }) + .is_ok() + { let _ = ack_rx.recv(); } } @@ -76,7 +82,10 @@ pub fn flush() { fn send_output(use_stderr: bool, output: String) { if OUTPUT_SINK - .send(OutputCommand::Write { use_stderr, output: output.clone() }) + .send(OutputCommand::Write { + use_stderr, + output: output.clone(), + }) .is_err() { // Fallback: write directly if sink thread is dead diff --git a/sdk/src/services/clean_service.rs b/sdk/src/services/clean_service.rs index b859f1fb..aa0c1bfe 100644 --- a/sdk/src/services/clean_service.rs +++ b/sdk/src/services/clean_service.rs @@ -16,7 +16,13 @@ use crate::services::common::{ use crate::{CliError, MemorySyncCommandOptions, MemorySyncCommandResult}; pub fn clean(options: MemorySyncCommandOptions) -> Result { - let logger = create_logger("clean", options.log_level.as_deref().and_then(|s| crate::infra::logger::LogLevel::from_str_loose(s))); + let logger = create_logger( + "clean", + options + .log_level + .as_deref() + .and_then(|s| crate::infra::logger::LogLevel::from_str_loose(s)), + ); let _span = logger.span("command.clean").enter(); logger.info("Clean started", None); @@ -31,36 +37,56 @@ pub fn clean(options: MemorySyncCommandOptions) -> Result Result Result Result std::io::Result<()> { + fn create_test_config( + home_dir: &std::path::Path, + workspace_dir: &std::path::Path, + ) -> std::io::Result<()> { let config_content = json!({ "workspaceDir": workspace_dir.to_string_lossy() }); @@ -874,10 +1021,8 @@ mod tests { std::fs::create_dir_all(ws.join("project-b")).unwrap(); let scope = ws.join("project-a"); - let snapshot = build_cleanup_snapshot(&ws.to_string_lossy(), - &HashMap::new(), - &HashMap::new(), - ).unwrap(); + let snapshot = + build_cleanup_snapshot(&ws.to_string_lossy(), &HashMap::new(), &HashMap::new()).unwrap(); let filtered = filter_snapshot_by_scope(snapshot, &scope, ws); diff --git a/sdk/src/services/common.rs b/sdk/src/services/common.rs index f9e23bc7..4ec68f07 100644 --- a/sdk/src/services/common.rs +++ b/sdk/src/services/common.rs @@ -3,10 +3,10 @@ use std::path::{Path, PathBuf}; use serde::de::DeserializeOwned; use serde_json::{Value, json}; +use crate::CliError; use crate::context::OutputContext; use crate::domain::config::{self, ConfigLoader, PluginsConfig, UserConfigFile}; use crate::infra::logger::Logger; -use crate::CliError; // --------------------------------------------------------------------------- // Plugin defaults @@ -386,7 +386,6 @@ pub fn collect_context( enabled_plugins: &EnabledPlugins, logger: &Logger, ) -> Result { - let aindex = { let _span = logger.span("collect.aindex_resolvers").enter(); collect_json::( diff --git a/sdk/src/services/dry_run_service.rs b/sdk/src/services/dry_run_service.rs index 6ac0777b..62043b37 100644 --- a/sdk/src/services/dry_run_service.rs +++ b/sdk/src/services/dry_run_service.rs @@ -22,7 +22,13 @@ struct PlannedOutputFile { } pub fn dry_run(options: MemorySyncCommandOptions) -> Result { - let logger = create_logger("dry_run", options.log_level.as_deref().and_then(|s| crate::infra::logger::LogLevel::from_str_loose(s))); + let logger = create_logger( + "dry_run", + options + .log_level + .as_deref() + .and_then(|s| crate::infra::logger::LogLevel::from_str_loose(s)), + ); let _span = logger.span("command.dry_run").enter(); logger.info("Dry run started", None); @@ -39,28 +45,45 @@ pub fn dry_run(options: MemorySyncCommandOptions) -> Result Result std::io::Result<()> { + fn create_test_config( + home_dir: &std::path::Path, + workspace_dir: &std::path::Path, + ) -> std::io::Result<()> { let config_content = json!({ "workspaceDir": workspace_dir.to_string_lossy() }); diff --git a/sdk/src/services/install_service.rs b/sdk/src/services/install_service.rs index 7b951233..bc12deaa 100644 --- a/sdk/src/services/install_service.rs +++ b/sdk/src/services/install_service.rs @@ -27,12 +27,21 @@ struct PlannedOutputFile { pub(crate) fn install( options: MemorySyncCommandOptions, ) -> Result { - let logger = create_logger("install", options.log_level.as_deref().and_then(|s| crate::infra::logger::LogLevel::from_str_loose(s))); + let logger = create_logger( + "install", + options + .log_level + .as_deref() + .and_then(|s| crate::infra::logger::LogLevel::from_str_loose(s)), + ); let _span = logger.span("command.install").enter(); - logger.info("Install started", Some(json!({ - "cwd": options.cwd.as_ref(), - }))); + logger.info( + "Install started", + Some(json!({ + "cwd": options.cwd.as_ref(), + })), + ); let cwd = resolve_cwd(options.cwd.as_deref())?; @@ -46,37 +55,57 @@ pub(crate) fn install( .collect::>(); let workspace_dir_str = workspace_dir.to_string_lossy().into_owned(); - logger.info("Config loaded", Some(json!({ - "workspaceDir": &workspace_dir_str, - "configFound": config_result.found, - "configSources": config_result.sources, - }))); + logger.info( + "Config loaded", + Some(json!({ + "workspaceDir": &workspace_dir_str, + "configFound": config_result.found, + "configSources": config_result.sources, + })), + ); let global_scope = crate::services::common::build_global_scope(&config_result.config); - let enabled_plugins = EnabledPlugins::from_config(config_result.config.plugins.as_ref(), DefaultPluginKind::Install); - - logger.info("Plugins resolved", Some(json!({ - "enabled": enabled_plugins.registered_plugins(), - }))); + let enabled_plugins = EnabledPlugins::from_config( + config_result.config.plugins.as_ref(), + DefaultPluginKind::Install, + ); + + logger.info( + "Plugins resolved", + Some(json!({ + "enabled": enabled_plugins.registered_plugins(), + })), + ); let context_span = logger.span("context.collect").enter(); - let context = collect_context(&workspace_dir_str, global_scope.as_ref(), &enabled_plugins, &logger)?; + let context = collect_context( + &workspace_dir_str, + global_scope.as_ref(), + &enabled_plugins, + &logger, + )?; context_span.exit(); - logger.info("Context collected", Some(json!({ - "globalMemory": context.global_memory.is_some(), - "commands": context.fast_commands.as_ref().map(|v| v.len()), - "skills": context.skills.as_ref().map(|v| v.len()), - "rules": context.rules.as_ref().map(|v| v.len()), - }))); + logger.info( + "Context collected", + Some(json!({ + "globalMemory": context.global_memory.is_some(), + "commands": context.fast_commands.as_ref().map(|v| v.len()), + "skills": context.skills.as_ref().map(|v| v.len()), + "rules": context.rules.as_ref().map(|v| v.len()), + })), + ); let output_span = logger.span("output.build").enter(); let planned_outputs = build_output_files(&context, enabled_plugins, &logger)?; output_span.exit(); - logger.info("Output files built", Some(json!({ - "filesPlanned": planned_outputs.len(), - }))); + logger.info( + "Output files built", + Some(json!({ + "filesPlanned": planned_outputs.len(), + })), + ); let write_span = logger.span("files.write").enter(); let execution = write_output_files(&planned_outputs, &logger)?; @@ -84,13 +113,16 @@ pub(crate) fn install( warnings.extend(execution.warnings); - logger.info("Install completed", Some(json!({ - "success": execution.errors.is_empty(), - "filesAffected": execution.files_affected, - "dirsAffected": execution.dirs_affected, - "warnings": warnings.len(), - "errors": execution.errors.len(), - }))); + logger.info( + "Install completed", + Some(json!({ + "success": execution.errors.is_empty(), + "filesAffected": execution.files_affected, + "dirsAffected": execution.dirs_affected, + "warnings": warnings.len(), + "errors": execution.errors.len(), + })), + ); Ok(MemorySyncCommandResult { success: execution.errors.is_empty(), @@ -125,7 +157,8 @@ fn build_output_files( if enabled_plugins.claude_code { let plugin_span = logger.span("output.claude_code").enter(); - let plan = crate::domain::output_plans::claude_code_output_plan::build_claude_code_output_plan(context)?; + let plan = + crate::domain::output_plans::claude_code_output_plan::build_claude_code_output_plan(context)?; push_base_output_files(&mut outputs, &plan.output_files); plugin_span.exit(); } @@ -167,7 +200,8 @@ fn build_output_files( } if enabled_plugins.opencode { let plugin_span = logger.span("output.opencode").enter(); - let plan = crate::domain::output_plans::opencode_output_plan::build_opencode_output_plan(context)?; + let plan = + crate::domain::output_plans::opencode_output_plan::build_opencode_output_plan(context)?; push_base_output_files(&mut outputs, &plan.output_files); plugin_span.exit(); } @@ -191,7 +225,8 @@ fn build_output_files( } if enabled_plugins.windsurf { let plugin_span = logger.span("output.windsurf").enter(); - let plan = crate::domain::output_plans::windsurf_output_plan::build_windsurf_output_plan(context)?; + let plan = + crate::domain::output_plans::windsurf_output_plan::build_windsurf_output_plan(context)?; push_base_output_files(&mut outputs, &plan.output_files); plugin_span.exit(); } @@ -288,7 +323,10 @@ fn write_output_files( let existing = fs::read(path).ok(); if existing.as_deref() == Some(bytes.as_slice()) { - logger.debug(format!("file.skipped: {}", file.path), Some(json!({ "reason": "unchanged" }))); + logger.debug( + format!("file.skipped: {}", file.path), + Some(json!({ "reason": "unchanged" })), + ); continue; } diff --git a/sdk/src/services/prompt_service.rs b/sdk/src/services/prompt_service.rs index a7415091..93afadad 100644 --- a/sdk/src/services/prompt_service.rs +++ b/sdk/src/services/prompt_service.rs @@ -1092,7 +1092,10 @@ pub fn get_prompt( let def = build_prompt_definition_from_id(prompt_id, &env)?; let result = hydrate_prompt(&def, true); - logger.info(format!("Get prompt: {}", prompt_id), Some(serde_json::json!({ "found": result.is_some() }))); + logger.info( + format!("Get prompt: {}", prompt_id), + Some(serde_json::json!({ "found": result.is_some() })), + ); Ok(result) } @@ -1113,7 +1116,10 @@ pub fn upsert_prompt_source(input: &UpsertPromptSourceInput) -> Result Date: Sat, 25 Apr 2026 14:03:19 +0800 Subject: [PATCH 05/45] fix: improve Missing compiled prompt error with file path and add regression tests - Add entry.name to error messages in subagent, rule, command collectors/builders - Strengthen unit test assertions to verify file paths in error output - Copy missing .src.mdx to .mdx compiled prompts in aindex - Add trae smoke test file --- Cargo.lock | 18 +- Cargo.toml | 2 +- cli/local-tests/src/lib.rs | 10 + cli/local-tests/tests/trae_smoke.rs | 121 +++++++++ cli/npm/darwin-arm64/package.json | 2 +- cli/npm/darwin-x64/package.json | 2 +- cli/npm/linux-arm64-gnu/package.json | 2 +- cli/npm/linux-x64-gnu/package.json | 2 +- cli/npm/win32-x64-msvc/package.json | 2 +- cli/package.json | 12 +- doc/package.json | 2 +- gui/package.json | 2 +- gui/src-tauri/Cargo.toml | 2 +- gui/src-tauri/tauri.conf.json | 2 +- mcp/npm/darwin-arm64/package.json | 2 +- mcp/npm/darwin-x64/package.json | 2 +- mcp/npm/linux-arm64-gnu/package.json | 2 +- mcp/npm/linux-x64-gnu/package.json | 2 +- mcp/npm/win32-x64-msvc/package.json | 2 +- mcp/package.json | 12 +- package.json | 2 +- .../domain/output_plans/trae_output_plan.rs | 244 +++++++++++++++++- sdk/src/repositories/command.rs | 22 +- sdk/src/repositories/rule.rs | 22 +- sdk/src/repositories/skill.rs | 26 +- sdk/src/repositories/subagent.rs | 22 +- 26 files changed, 461 insertions(+), 80 deletions(-) create mode 100644 cli/local-tests/tests/trae_smoke.rs diff --git a/Cargo.lock b/Cargo.lock index 01e6c251..41ac9f74 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3062,7 +3062,7 @@ dependencies = [ [[package]] name = "memory-sync" -version = "2026.10425.10151" +version = "2026.10425.10602" dependencies = [ "tnmsc", ] @@ -6223,7 +6223,7 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tnmsc" -version = "2026.10425.10151" +version = "2026.10425.10602" dependencies = [ "clap", "serde_json", @@ -6232,7 +6232,7 @@ dependencies = [ [[package]] name = "tnmsc-integrate-tests" -version = "2026.10425.10151" +version = "2026.10425.10602" dependencies = [ "flate2", "serde_json", @@ -6242,7 +6242,7 @@ dependencies = [ [[package]] name = "tnmsc-local-tests" -version = "2026.10425.10151" +version = "2026.10425.10602" dependencies = [ "dirs", "json5", @@ -6251,7 +6251,7 @@ dependencies = [ [[package]] name = "tnmsd" -version = "2026.10425.10151" +version = "2026.10425.10602" dependencies = [ "base64 0.22.1", "chrono", @@ -6279,7 +6279,7 @@ dependencies = [ [[package]] name = "tnmsg" -version = "2026.10425.10151" +version = "2026.10425.10602" dependencies = [ "dirs", "proptest", @@ -6294,7 +6294,7 @@ dependencies = [ [[package]] name = "tnmsm" -version = "2026.10425.10151" +version = "2026.10425.10602" dependencies = [ "clap", "serde_json", @@ -6303,7 +6303,7 @@ dependencies = [ [[package]] name = "tnmsm-integrate-tests" -version = "2026.10425.10151" +version = "2026.10425.10602" dependencies = [ "serde_json", "testcontainers", @@ -7819,7 +7819,7 @@ dependencies = [ [[package]] name = "xtask" -version = "2026.10425.10151" +version = "2026.10425.10602" dependencies = [ "clap", "serde", diff --git a/Cargo.toml b/Cargo.toml index dddfa3e8..1cdc7c35 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -29,7 +29,7 @@ members = [ ] [workspace.package] -version = "2026.10425.10151" +version = "2026.10425.10602" edition = "2024" rust-version = "1.88" license = "AGPL-3.0-only" diff --git a/cli/local-tests/src/lib.rs b/cli/local-tests/src/lib.rs index f795b84a..41eea452 100644 --- a/cli/local-tests/src/lib.rs +++ b/cli/local-tests/src/lib.rs @@ -216,6 +216,16 @@ impl LocalTestRunner { fs::read_to_string(home_dir().join(".claude").join("CLAUDE.md")).ok() } + /// 检查项目级 .trae/steering/GLOBAL.md 是否存在。 + pub fn trae_steering_file_exists(&self) -> bool { + self.cwd.join(".trae").join("steering").join("GLOBAL.md").is_file() + } + + /// 检查项目级 .trae-cn/user_rules/GLOBAL.md 是否存在。 + pub fn trae_cn_file_exists(&self) -> bool { + self.cwd.join(".trae-cn").join("user_rules").join("GLOBAL.md").is_file() + } + /// 检查项目级 CLAUDE.md 是否存在。 pub fn claude_project_file_exists(&self) -> bool { self.cwd.join("CLAUDE.md").is_file() diff --git a/cli/local-tests/tests/trae_smoke.rs b/cli/local-tests/tests/trae_smoke.rs new file mode 100644 index 00000000..ef616bfb --- /dev/null +++ b/cli/local-tests/tests/trae_smoke.rs @@ -0,0 +1,121 @@ +//! 本地裸机 Trae 测试:验证 .trae/steering/GLOBAL.md 正确生成, +//! .trae-cn/ 不被输出,且清理时兼容清理旧的 .trae-cn/。 + +use std::fs; + +use tnmsc_local_tests::LocalTestRunner; + +#[test] +fn binary_exists_before_tests() { + let binary = tnmsc_local_tests::binary_path(); + assert!( + binary.is_file(), + "binary not found at: {}\n\nplease compile it first:\n cargo build -p tnmsc\n", + binary.display() + ); +} + +#[test] +fn local_trae_steering_generated_after_install() { + let runner = LocalTestRunner::new(); + runner.assert_project_ready(); + + let clean = runner.clean(); + clean.assert_success("tnmsc clean before install"); + + let install = runner.install(); + install.assert_success("tnmsc install"); + + assert!( + runner.trae_steering_file_exists(), + ".trae/steering/GLOBAL.md should be generated after install, stdout:\n{}\nstderr:\n{}", + install.stdout, + install.stderr + ); + + assert!( + !runner.trae_cn_file_exists(), + ".trae-cn/user_rules/GLOBAL.md must NOT be generated after install" + ); +} + +#[test] +fn local_trae_steering_idempotent() { + let runner = LocalTestRunner::new(); + runner.assert_project_ready(); + + let clean = runner.clean(); + clean.assert_success("tnmsc clean before install"); + + let first = runner.install(); + first.assert_success("first tnmsc install"); + assert!(runner.trae_steering_file_exists()); + + let content_first = + fs::read_to_string(runner.cwd().join(".trae").join("steering").join("GLOBAL.md")).unwrap(); + + let second = runner.install(); + second.assert_success("second tnmsc install"); + + let content_second = + fs::read_to_string(runner.cwd().join(".trae").join("steering").join("GLOBAL.md")).unwrap(); + + assert_eq!( + content_first, content_second, + "consecutive installs should produce identical .trae/steering/GLOBAL.md" + ); +} + +#[test] +fn local_trae_steering_removed_after_clean() { + let runner = LocalTestRunner::new(); + runner.assert_project_ready(); + + let clean = runner.clean(); + clean.assert_success("tnmsc clean before install"); + + let install = runner.install(); + install.assert_success("tnmsc install"); + assert!(runner.trae_steering_file_exists()); + + let clean = runner.clean(); + clean.assert_success("tnmsc clean"); + + assert!( + !runner.trae_steering_file_exists(), + ".trae/steering/GLOBAL.md should be removed after clean" + ); +} + +#[test] +fn local_trae_cn_cleaned_for_compatibility() { + let runner = LocalTestRunner::new(); + runner.assert_project_ready(); + + let clean = runner.clean(); + clean.assert_success("tnmsc clean before install"); + + let install = runner.install(); + install.assert_success("tnmsc install"); + assert!(runner.trae_steering_file_exists()); + + // Simulate old-style .trae-cn/ output (should be cleaned up) + let trae_cn_path = runner.cwd().join(".trae-cn").join("user_rules").join("GLOBAL.md"); + fs::create_dir_all(trae_cn_path.parent().unwrap()).unwrap(); + fs::write(&trae_cn_path, "# legacy\n").unwrap(); + assert!(runner.trae_cn_file_exists(), "fake .trae-cn should exist before clean"); + + let clean = runner.clean(); + clean.assert_success("tnmsc clean removes legacy .trae-cn"); + + assert!( + !runner.trae_cn_file_exists(), + "legacy .trae-cn/user_rules/GLOBAL.md should be removed during clean for compatibility" + ); + + // .trae/steering/GLOBAL.md should also be removed + assert!( + !runner.trae_steering_file_exists(), + ".trae/steering/GLOBAL.md should also be removed after clean" + ); +} diff --git a/cli/npm/darwin-arm64/package.json b/cli/npm/darwin-arm64/package.json index 6258f449..9feb1685 100644 --- a/cli/npm/darwin-arm64/package.json +++ b/cli/npm/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-darwin-arm64", - "version": "2026.10425.10151", + "version": "2026.10425.10602", "description": "tnmsc native binary for macOS arm64", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/cli/npm/darwin-x64/package.json b/cli/npm/darwin-x64/package.json index edb63393..955d7ffd 100644 --- a/cli/npm/darwin-x64/package.json +++ b/cli/npm/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-darwin-x64", - "version": "2026.10425.10151", + "version": "2026.10425.10602", "description": "tnmsc native binary for macOS x64", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/cli/npm/linux-arm64-gnu/package.json b/cli/npm/linux-arm64-gnu/package.json index a3d0c325..57c55833 100644 --- a/cli/npm/linux-arm64-gnu/package.json +++ b/cli/npm/linux-arm64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-linux-arm64-gnu", - "version": "2026.10425.10151", + "version": "2026.10425.10602", "description": "tnmsc native binary for Linux arm64 (glibc)", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/cli/npm/linux-x64-gnu/package.json b/cli/npm/linux-x64-gnu/package.json index 243e6350..a58374c9 100644 --- a/cli/npm/linux-x64-gnu/package.json +++ b/cli/npm/linux-x64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-linux-x64-gnu", - "version": "2026.10425.10151", + "version": "2026.10425.10602", "description": "tnmsc native binary for Linux x64 (glibc)", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/cli/npm/win32-x64-msvc/package.json b/cli/npm/win32-x64-msvc/package.json index 1e29886d..61f75553 100644 --- a/cli/npm/win32-x64-msvc/package.json +++ b/cli/npm/win32-x64-msvc/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-win32-x64-msvc", - "version": "2026.10425.10151", + "version": "2026.10425.10602", "description": "tnmsc native binary for Windows x64", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/cli/package.json b/cli/package.json index 6ba5505c..707ebb5d 100644 --- a/cli/package.json +++ b/cli/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli", - "version": "2026.10425.10151", + "version": "2026.10425.10602", "description": "TrueNine Memory Synchronization CLI metadata package", "author": "TrueNine", "license": "AGPL-3.0-only", @@ -34,10 +34,10 @@ "test": "cargo test --manifest-path Cargo.toml" }, "optionalDependencies": { - "@truenine/memory-sync-cli-darwin-arm64": "2026.10425.10151", - "@truenine/memory-sync-cli-darwin-x64": "2026.10425.10151", - "@truenine/memory-sync-cli-linux-arm64-gnu": "2026.10425.10151", - "@truenine/memory-sync-cli-linux-x64-gnu": "2026.10425.10151", - "@truenine/memory-sync-cli-win32-x64-msvc": "2026.10425.10151" + "@truenine/memory-sync-cli-darwin-arm64": "2026.10425.10602", + "@truenine/memory-sync-cli-darwin-x64": "2026.10425.10602", + "@truenine/memory-sync-cli-linux-arm64-gnu": "2026.10425.10602", + "@truenine/memory-sync-cli-linux-x64-gnu": "2026.10425.10602", + "@truenine/memory-sync-cli-win32-x64-msvc": "2026.10425.10602" } } diff --git a/doc/package.json b/doc/package.json index 75056af1..34eb0d92 100644 --- a/doc/package.json +++ b/doc/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-docs", - "version": "2026.10425.10151", + "version": "2026.10425.10602", "private": true, "packageManager": "pnpm@10.33.0", "description": "Chinese-first manifesto-led documentation site for @truenine/memory-sync.", diff --git a/gui/package.json b/gui/package.json index b5a5cabb..9d68fdc2 100644 --- a/gui/package.json +++ b/gui/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-gui", - "version": "2026.10425.10151", + "version": "2026.10425.10602", "private": true, "engines": { "node": ">= 22" diff --git a/gui/src-tauri/Cargo.toml b/gui/src-tauri/Cargo.toml index 06405893..8c683271 100644 --- a/gui/src-tauri/Cargo.toml +++ b/gui/src-tauri/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "tnmsg" -version = "2026.10425.10151" +version = "2026.10425.10602" description = "Memory Sync desktop GUI application" authors.workspace = true edition.workspace = true diff --git a/gui/src-tauri/tauri.conf.json b/gui/src-tauri/tauri.conf.json index 7e6f1c67..fae937f9 100644 --- a/gui/src-tauri/tauri.conf.json +++ b/gui/src-tauri/tauri.conf.json @@ -1,6 +1,6 @@ { "$schema": "https://schema.tauri.app/config/2", - "version": "2026.10425.10151", + "version": "2026.10425.10602", "productName": "Memory Sync", "identifier": "org.truenine.memory-sync", "build": { diff --git a/mcp/npm/darwin-arm64/package.json b/mcp/npm/darwin-arm64/package.json index bd246f0e..9f74a551 100644 --- a/mcp/npm/darwin-arm64/package.json +++ b/mcp/npm/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-mcp-darwin-arm64", - "version": "2026.10425.10151", + "version": "2026.10425.10602", "description": "tnmsm native binary for macOS arm64", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/mcp/npm/darwin-x64/package.json b/mcp/npm/darwin-x64/package.json index cafdbdc6..36719faf 100644 --- a/mcp/npm/darwin-x64/package.json +++ b/mcp/npm/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-mcp-darwin-x64", - "version": "2026.10425.10151", + "version": "2026.10425.10602", "description": "tnmsm native binary for macOS x64", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/mcp/npm/linux-arm64-gnu/package.json b/mcp/npm/linux-arm64-gnu/package.json index 3bc21c57..36a4fd49 100644 --- a/mcp/npm/linux-arm64-gnu/package.json +++ b/mcp/npm/linux-arm64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-mcp-linux-arm64-gnu", - "version": "2026.10425.10151", + "version": "2026.10425.10602", "description": "tnmsm native binary for Linux arm64 (glibc)", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/mcp/npm/linux-x64-gnu/package.json b/mcp/npm/linux-x64-gnu/package.json index 3545938a..73a118ff 100644 --- a/mcp/npm/linux-x64-gnu/package.json +++ b/mcp/npm/linux-x64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-mcp-linux-x64-gnu", - "version": "2026.10425.10151", + "version": "2026.10425.10602", "description": "tnmsm native binary for Linux x64 (glibc)", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/mcp/npm/win32-x64-msvc/package.json b/mcp/npm/win32-x64-msvc/package.json index aa716f6d..089c4834 100644 --- a/mcp/npm/win32-x64-msvc/package.json +++ b/mcp/npm/win32-x64-msvc/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-mcp-win32-x64-msvc", - "version": "2026.10425.10151", + "version": "2026.10425.10602", "description": "tnmsm native binary for Windows x64", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/mcp/package.json b/mcp/package.json index 2469895f..c9c34ae9 100644 --- a/mcp/package.json +++ b/mcp/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-mcp", - "version": "2026.10425.10151", + "version": "2026.10425.10602", "description": "TrueNine Memory Sync MCP metadata package", "author": "TrueNine", "license": "AGPL-3.0-only", @@ -32,10 +32,10 @@ "test": "cargo test --manifest-path Cargo.toml" }, "optionalDependencies": { - "@truenine/memory-sync-mcp-darwin-arm64": "2026.10425.10151", - "@truenine/memory-sync-mcp-darwin-x64": "2026.10425.10151", - "@truenine/memory-sync-mcp-linux-arm64-gnu": "2026.10425.10151", - "@truenine/memory-sync-mcp-linux-x64-gnu": "2026.10425.10151", - "@truenine/memory-sync-mcp-win32-x64-msvc": "2026.10425.10151" + "@truenine/memory-sync-mcp-darwin-arm64": "2026.10425.10602", + "@truenine/memory-sync-mcp-darwin-x64": "2026.10425.10602", + "@truenine/memory-sync-mcp-linux-arm64-gnu": "2026.10425.10602", + "@truenine/memory-sync-mcp-linux-x64-gnu": "2026.10425.10602", + "@truenine/memory-sync-mcp-win32-x64-msvc": "2026.10425.10602" } } diff --git a/package.json b/package.json index 96f83d20..7d03cdf9 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync", - "version": "2026.10425.10151", + "version": "2026.10425.10602", "description": "Cross-AI-tool prompt synchronisation toolkit (CLI + Tauri desktop GUI) — one ruleset, multi-target adaptation. Monorepo powered by pnpm + Turbo.", "license": "AGPL-3.0-only", "keywords": [ diff --git a/sdk/src/domain/output_plans/trae_output_plan.rs b/sdk/src/domain/output_plans/trae_output_plan.rs index 353958bd..09a837ce 100644 --- a/sdk/src/domain/output_plans/trae_output_plan.rs +++ b/sdk/src/domain/output_plans/trae_output_plan.rs @@ -75,17 +75,6 @@ fn build_output_files( .to_string_lossy() .into_owned(), scope: Some(PROJECT_SCOPE.to_string()), - content: content.clone(), - encoding: None, - }); - - let trae_cn_dir = project_root_dir.join(".trae-cn").join("user_rules"); - output_files.push(BaseOutputFileDeclarationDto { - path: trae_cn_dir - .join(TRAE_CN_USER_RULES_FILE) - .to_string_lossy() - .into_owned(), - scope: Some(PROJECT_SCOPE.to_string()), content, encoding: None, }); @@ -193,3 +182,236 @@ fn resolve_relative_path(rp: &RelativePath) -> PathBuf { } PathBuf::from(&rp.base_path).join(raw) } + +#[cfg(test)] +mod tests { + use tempfile::TempDir; + + use super::*; + use crate::domain::plugin_shared::{ + FilePathKind, GlobalMemoryPrompt, ProjectRootMemoryPrompt, PromptKind, RootPath, + }; + + fn create_relative_path(base_path: &str, path: &str) -> RelativePath { + RelativePath::new(path, base_path) + } + + fn create_root_prompt(content: &str) -> ProjectRootMemoryPrompt { + ProjectRootMemoryPrompt { + prompt_type: PromptKind::ProjectRootMemory, + content: content.to_string(), + length: content.len(), + file_path_kind: FilePathKind::Root, + dir: RootPath::new(""), + yaml_front_matter: None, + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, + } + } + + fn create_global_memory(content: &str) -> GlobalMemoryPrompt { + GlobalMemoryPrompt { + prompt_type: PromptKind::GlobalMemory, + content: content.to_string(), + length: content.len(), + file_path_kind: FilePathKind::Relative, + dir: create_relative_path("/home", ".trae"), + raw_front_matter: None, + markdown_contents: None, + parent_directory_path: None, + raw_content: None, + } + } + + fn create_project(workspace_root: &str, name: &str) -> Project { + Project { + name: Some(name.to_string()), + dir_from_workspace_path: Some(create_relative_path(workspace_root, name)), + ..Project::default() + } + } + + #[test] + fn trae_output_contains_only_steering_not_trae_cn() { + let temp_dir = TempDir::new().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let context = OutputContext { + workspace: Some(Workspace { + directory: RootPath::new(&workspace_dir.to_string_lossy()), + projects: vec![ + Project { + name: Some("__workspace__".to_string()), + is_workspace_root_project: Some(true), + root_memory_prompt: Some(create_root_prompt("workspace root")), + ..Project::default() + }, + Project { + is_prompt_source_project: Some(true), + root_memory_prompt: Some(create_root_prompt("prompt source")), + ..create_project(&workspace_dir.to_string_lossy(), "aindex") + }, + Project { + root_memory_prompt: Some(create_root_prompt("project root")), + ..create_project(&workspace_dir.to_string_lossy(), "project-a") + }, + Project { + root_memory_prompt: Some(create_root_prompt("project root")), + ..create_project(&workspace_dir.to_string_lossy(), "project-a") + }, + ], + }), + global_memory: Some(create_global_memory("global prompt")), + registered_output_plugins: Some(vec![AGENTS_OUTPUT_ADAPTOR.to_string()]), + ..OutputContext::default() + }; + + let plan = build_trae_output_plan(&context).unwrap(); + let output_paths: Vec<&str> = plan + .output_files + .iter() + .map(|f| f.path.as_str()) + .collect(); + + assert!( + output_paths.contains( + &workspace_dir + .join(".trae") + .join("steering") + .join("GLOBAL.md") + .to_string_lossy() + .as_ref() + ), + "output must include .trae/steering/GLOBAL.md" + ); + + assert!( + !output_paths + .iter() + .any(|p| p.contains(".trae-cn")), + "output must NOT include any .trae-cn path, got: {:?}", + output_paths + ); + } + + #[test] + fn trae_output_omits_prompt_source_projects() { + let temp_dir = TempDir::new().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + + let context = OutputContext { + workspace: Some(Workspace { + directory: RootPath::new(&workspace_dir.to_string_lossy()), + projects: vec![ + Project { + name: Some("__workspace__".to_string()), + is_workspace_root_project: Some(true), + root_memory_prompt: Some(create_root_prompt("workspace root")), + ..Project::default() + }, + Project { + is_prompt_source_project: Some(true), + root_memory_prompt: Some(create_root_prompt("prompt source")), + ..create_project(&workspace_dir.to_string_lossy(), "aindex") + }, + Project { + root_memory_prompt: Some(create_root_prompt("project root")), + ..create_project(&workspace_dir.to_string_lossy(), "project-a") + }, + ], + }), + ..OutputContext::default() + }; + + let plan = build_trae_output_plan(&context).unwrap(); + let output_paths: Vec<&str> = plan + .output_files + .iter() + .map(|f| f.path.as_str()) + .collect(); + + assert!( + output_paths.contains( + &workspace_dir + .join("project-a") + .join(".trae") + .join("steering") + .join("GLOBAL.md") + .to_string_lossy() + .as_ref() + ), + "output must include project-a/.trae/steering/GLOBAL.md" + ); + + assert!( + !output_paths.contains( + &workspace_dir + .join("aindex") + .join(".trae") + .join("steering") + .join("GLOBAL.md") + .to_string_lossy() + .as_ref() + ), + "output must NOT include prompt source project" + ); + } + + #[test] + fn trae_cleanup_still_removes_trae_cn_for_compatibility() { + let temp_dir = TempDir::new().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let project_root = workspace_dir.join("project-a"); + + let context = OutputContext { + workspace: Some(Workspace { + directory: RootPath::new(&workspace_dir.to_string_lossy()), + projects: vec![ + Project { + name: Some("__workspace__".to_string()), + is_workspace_root_project: Some(true), + ..Project::default() + }, + Project { + ..create_project(&workspace_dir.to_string_lossy(), "project-a") + }, + ], + }), + ..OutputContext::default() + }; + + let plan = build_trae_output_plan(&context).unwrap(); + let cleanup_paths: Vec<&str> = plan + .cleanup + .delete + .iter() + .map(|t| t.path.as_str()) + .collect(); + + assert!( + cleanup_paths.contains( + &project_root + .join(".trae-cn") + .join("user_rules") + .join("GLOBAL.md") + .to_string_lossy() + .as_ref() + ), + "cleanup must still include .trae-cn/user_rules/GLOBAL.md for backward compatibility, got: {:?}", + cleanup_paths + ); + + assert!( + cleanup_paths.contains( + &project_root + .join(".trae") + .join("steering") + .join("GLOBAL.md") + .to_string_lossy() + .as_ref() + ), + "cleanup must include .trae/steering/GLOBAL.md, got: {:?}", + cleanup_paths + ); + } +} diff --git a/sdk/src/repositories/command.rs b/sdk/src/repositories/command.rs index 496051bf..978e4c51 100644 --- a/sdk/src/repositories/command.rs +++ b/sdk/src/repositories/command.rs @@ -51,7 +51,12 @@ fn build_command_prompt( let compiled = entry .compiled .as_ref() - .ok_or_else(|| crate::CliError::ConfigError("Missing compiled prompt".to_string()))?; + .ok_or_else(|| { + crate::CliError::ConfigError(format!( + "Missing compiled prompt: {}.mdx", + entry.name + )) + })?; let file_path = format!("{}/{}.mdx", dir, entry.name); validate_command_metadata(&compiled.metadata, &file_path) @@ -135,9 +140,10 @@ pub fn collect_command(options_json: &str) -> Result { let mut prompts: Vec = Vec::new(); for entry in &entries { if entry.compiled.is_none() && (entry.src_zh.is_some() || entry.src_en.is_some()) { - return Err(crate::CliError::ConfigError( - "Missing compiled prompt".to_string(), - )); + return Err(crate::CliError::ConfigError(format!( + "Missing compiled prompt: {}.mdx", + entry.name + ))); } if entry.compiled.is_some() { prompts.push(build_command_prompt(entry, &dir_str)?); @@ -245,11 +251,11 @@ mod tests { let result = collect_command(&options.to_string()); assert!(result.is_err()); + let err = result.unwrap_err().to_string(); assert!( - result - .unwrap_err() - .to_string() - .contains("Missing compiled prompt") + err.contains("Missing compiled prompt: demo.mdx"), + "expected file path in error message, got: {}", + err ); } diff --git a/sdk/src/repositories/rule.rs b/sdk/src/repositories/rule.rs index d2866101..7856b810 100644 --- a/sdk/src/repositories/rule.rs +++ b/sdk/src/repositories/rule.rs @@ -95,7 +95,12 @@ fn build_rule_prompt( let compiled = entry .compiled .as_ref() - .ok_or_else(|| crate::CliError::ConfigError("Missing compiled prompt".to_string()))?; + .ok_or_else(|| { + crate::CliError::ConfigError(format!( + "Missing compiled prompt: {}.mdx", + entry.name + )) + })?; let file_path = format!("{}/{}.mdx", dir, entry.name); validate_rule_metadata(&compiled.metadata, &file_path).map_err(crate::CliError::ConfigError)?; @@ -196,9 +201,10 @@ pub fn collect_rule(options_json: &str) -> Result { let mut prompts: Vec = Vec::new(); for entry in &entries { if entry.compiled.is_none() && (entry.src_zh.is_some() || entry.src_en.is_some()) { - return Err(crate::CliError::ConfigError( - "Missing compiled prompt".to_string(), - )); + return Err(crate::CliError::ConfigError(format!( + "Missing compiled prompt: {}.mdx", + entry.name + ))); } if entry.compiled.is_some() { prompts.push(build_rule_prompt(entry, &dir_str)?); @@ -238,11 +244,11 @@ mod tests { let result = collect_rule(&options.to_string()); assert!(result.is_err()); + let err = result.unwrap_err().to_string(); assert!( - result - .unwrap_err() - .to_string() - .contains("Missing compiled prompt") + err.contains("Missing compiled prompt: qa/boot.mdx"), + "expected file path in error message, got: {}", + err ); } diff --git a/sdk/src/repositories/skill.rs b/sdk/src/repositories/skill.rs index 9db1cf60..3e70c122 100644 --- a/sdk/src/repositories/skill.rs +++ b/sdk/src/repositories/skill.rs @@ -977,11 +977,16 @@ mod tests { let result = collect_skill(&options.to_string()); assert!(result.is_err()); + let err = result.unwrap_err().to_string(); assert!( - result - .unwrap_err() - .to_string() - .contains("Missing compiled prompt") + err.contains("Missing compiled prompt for skill child doc"), + "expected detailed error message with path, got: {}", + err + ); + assert!( + err.contains("guide.src.mdx"), + "expected source path in error: {}", + err ); } @@ -1003,11 +1008,16 @@ mod tests { let result = collect_skill(&options.to_string()); assert!(result.is_err()); + let err = result.unwrap_err().to_string(); assert!( - result - .unwrap_err() - .to_string() - .contains("Missing compiled prompt") + err.contains("Missing compiled prompt for skill"), + "expected detailed error message with path, got: {}", + err + ); + assert!( + err.contains("skill.src.mdx"), + "expected source path in error: {}", + err ); } diff --git a/sdk/src/repositories/subagent.rs b/sdk/src/repositories/subagent.rs index 791ce3af..a4936158 100644 --- a/sdk/src/repositories/subagent.rs +++ b/sdk/src/repositories/subagent.rs @@ -84,7 +84,12 @@ fn build_subagent_prompt( let compiled = entry .compiled .as_ref() - .ok_or_else(|| crate::CliError::ConfigError("Missing compiled prompt".to_string()))?; + .ok_or_else(|| { + crate::CliError::ConfigError(format!( + "Missing compiled prompt: {}.mdx", + entry.name + )) + })?; let file_path = format!("{}/{}.mdx", dir, entry.name); validate_subagent_metadata(&compiled.metadata, &file_path) @@ -158,9 +163,10 @@ pub fn collect_subagent(options_json: &str) -> Result { let mut diagnostics: Vec = Vec::new(); for entry in &entries { if entry.compiled.is_none() && (entry.src_zh.is_some() || entry.src_en.is_some()) { - return Err(crate::CliError::ConfigError( - "Missing compiled prompt".to_string(), - )); + return Err(crate::CliError::ConfigError(format!( + "Missing compiled prompt: {}.mdx", + entry.name + ))); } if entry.compiled.is_some() { prompts.push(build_subagent_prompt(entry, &dir_str, &mut diagnostics)?); @@ -327,11 +333,11 @@ mod tests { let result = collect_subagent(&options.to_string()); assert!(result.is_err()); + let err = result.unwrap_err().to_string(); assert!( - result - .unwrap_err() - .to_string() - .contains("Missing compiled prompt") + err.contains("Missing compiled prompt: demo.mdx"), + "expected file path in error message, got: {}", + err ); } From 17efc57458a7a04940856ff31dd2f583bb52ecf5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Mon, 27 Apr 2026 14:23:30 +0800 Subject: [PATCH 06/45] refactor: rename FastCommand to SlashCommand and update related structures and usages --- cli/local-tests/tests/codex_smoke.rs | 57 ++++++ sdk/src/context/output_context.rs | 4 +- sdk/src/domain/mod.rs | 2 +- sdk/src/domain/output_context.rs | 4 +- .../output_plans/claude_code_output_plan.rs | 149 ++++++++------ .../domain/output_plans/codex_output_plan.rs | 154 +++++++++------ .../domain/output_plans/droid_output_plan.rs | 183 ++++++++++-------- .../output_plans/opencode_output_plan.rs | 146 +++++++++----- sdk/src/domain/plugin_shared.rs | 14 +- sdk/src/repositories/command.rs | 14 +- sdk/src/services/common.rs | 4 +- sdk/src/services/dry_run_service.rs | 2 +- sdk/src/services/install_service.rs | 2 +- 13 files changed, 468 insertions(+), 267 deletions(-) diff --git a/cli/local-tests/tests/codex_smoke.rs b/cli/local-tests/tests/codex_smoke.rs index cb7db7f7..5a3e7da6 100644 --- a/cli/local-tests/tests/codex_smoke.rs +++ b/cli/local-tests/tests/codex_smoke.rs @@ -484,3 +484,60 @@ fn local_codex_dry_run_does_not_write() { ".codex/ should not be created by dry-run" ); } + +/// Regression test: skill output directories should only contain SKILL.md. +/// +/// Prior bug: resources, child docs, and mcp config files were rendered inside +/// each skill directory (e.g. act/act/, references/*), creating incorrect +/// nested structures and duplicate content. +/// +/// Note: only checks plugins generated by `tnmsc install` (codex, opencode, +/// claude). droid (.factory/skills) is a separate plugin not emitted by +/// install; it has its own `skill_output_only_contains_skill_md` unit test. +#[test] +fn regression_skill_output_only_contains_skill_md() { + assert_codex_plugin_enabled(); + + let runner = LocalTestRunner::new(); + runner.assert_project_ready(); + + let clean = runner.clean(); + clean.assert_success("tnmsc clean before install"); + + let install = runner.install(); + install.assert_success("tnmsc install"); + + for (label, skills_dir) in [ + ("codex", runner.cwd().join(".codex").join("skills")), + ("opencode", runner.cwd().join(".opencode").join("skills")), + ("claude", runner.cwd().join(".claude").join("skills")), + ] { + assert!( + skills_dir.is_dir(), + "{label} skills dir should exist: {}", + skills_dir.display() + ); + + for entry in std::fs::read_dir(&skills_dir).unwrap().flatten() { + if !entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false) { + continue; + } + let skill_name = entry.file_name().to_string_lossy().to_string(); + let skill_dir = entry.path(); + + let mut entries: Vec<_> = std::fs::read_dir(&skill_dir) + .unwrap() + .flatten() + .map(|e| e.file_name().to_string_lossy().to_string()) + .collect(); + entries.sort(); + + assert_eq!( + entries, + vec!["SKILL.md"], + "{label}/skills/{skill_name} should only contain SKILL.md, got: {:?}", + entries + ); + } + } +} diff --git a/sdk/src/context/output_context.rs b/sdk/src/context/output_context.rs index 7bd235e3..4e957666 100644 --- a/sdk/src/context/output_context.rs +++ b/sdk/src/context/output_context.rs @@ -1,7 +1,7 @@ use serde::{Deserialize, Serialize}; use crate::domain::plugin_shared::{ - AIAgentIgnoreConfigFile, FastCommandPrompt, GlobalMemoryPrompt, ProjectIDEConfigFile, + AIAgentIgnoreConfigFile, SlashCommandPrompt, GlobalMemoryPrompt, ProjectIDEConfigFile, ReadmePrompt, RulePrompt, SkillPrompt, SubAgentPrompt, Workspace, }; @@ -19,7 +19,7 @@ pub struct OutputContext { #[serde(default, skip_serializing_if = "Option::is_none")] pub editor_config_files: Option>, #[serde(default, skip_serializing_if = "Option::is_none")] - pub fast_commands: Option>, + pub slash_commands: Option>, #[serde(default, skip_serializing_if = "Option::is_none")] pub sub_agents: Option>, #[serde(default, skip_serializing_if = "Option::is_none")] diff --git a/sdk/src/domain/mod.rs b/sdk/src/domain/mod.rs index 6f46f85d..1f9d9faa 100644 --- a/sdk/src/domain/mod.rs +++ b/sdk/src/domain/mod.rs @@ -13,7 +13,7 @@ pub use cleanup::{ pub use config::{ConfigLoader, MergedConfigResult, PluginsConfig, UserConfigFile}; pub use output_context::OutputContext; pub use plugin_shared::{ - AIAgentIgnoreConfigFile, FastCommandPrompt, GlobalMemoryPrompt, IDEKind, NamingCaseKind, + AIAgentIgnoreConfigFile, SlashCommandPrompt, GlobalMemoryPrompt, IDEKind, NamingCaseKind, PluginKind, Project, ProjectIDEConfigFile, PromptKind, ReadmePrompt, RelativePath, RulePrompt, RuleScope, SkillPrompt, SubAgentPrompt, Workspace, }; diff --git a/sdk/src/domain/output_context.rs b/sdk/src/domain/output_context.rs index 7bd235e3..4e957666 100644 --- a/sdk/src/domain/output_context.rs +++ b/sdk/src/domain/output_context.rs @@ -1,7 +1,7 @@ use serde::{Deserialize, Serialize}; use crate::domain::plugin_shared::{ - AIAgentIgnoreConfigFile, FastCommandPrompt, GlobalMemoryPrompt, ProjectIDEConfigFile, + AIAgentIgnoreConfigFile, SlashCommandPrompt, GlobalMemoryPrompt, ProjectIDEConfigFile, ReadmePrompt, RulePrompt, SkillPrompt, SubAgentPrompt, Workspace, }; @@ -19,7 +19,7 @@ pub struct OutputContext { #[serde(default, skip_serializing_if = "Option::is_none")] pub editor_config_files: Option>, #[serde(default, skip_serializing_if = "Option::is_none")] - pub fast_commands: Option>, + pub slash_commands: Option>, #[serde(default, skip_serializing_if = "Option::is_none")] pub sub_agents: Option>, #[serde(default, skip_serializing_if = "Option::is_none")] diff --git a/sdk/src/domain/output_plans/claude_code_output_plan.rs b/sdk/src/domain/output_plans/claude_code_output_plan.rs index 050dd0f9..45d5f6ea 100644 --- a/sdk/src/domain/output_plans/claude_code_output_plan.rs +++ b/sdk/src/domain/output_plans/claude_code_output_plan.rs @@ -162,64 +162,11 @@ fn build_output_files( content: build_skill_content(skill), encoding: None, }); - - // Child docs - if let Some(child_docs) = skill.child_docs.as_ref() { - for child_doc in child_docs { - let child_path = child_doc - .relative_path - .replace(".mdx", ".md") - .replace(".src.md", ".md"); - output_files.push(BaseOutputFileDeclarationDto { - path: skill_sub_dir - .join(&child_path) - .to_string_lossy() - .into_owned(), - scope: Some(PROJECT_SCOPE.to_string()), - content: child_doc.content.clone(), - encoding: None, - }); - } - } - - // Resources - if let Some(resources) = skill.resources.as_ref() { - for resource in resources { - let encoding = match resource.encoding { - crate::domain::plugin_shared::SkillResourceEncoding::Base64 => { - Some("base64".to_string()) - } - crate::domain::plugin_shared::SkillResourceEncoding::Text => None, - }; - output_files.push(BaseOutputFileDeclarationDto { - path: skill_sub_dir - .join(&resource.relative_path) - .to_string_lossy() - .into_owned(), - scope: Some(PROJECT_SCOPE.to_string()), - content: resource.content.clone(), - encoding, - }); - } - } - - // MCP config - if let Some(mcp_config) = skill.mcp_config.as_ref() { - output_files.push(BaseOutputFileDeclarationDto { - path: skill_sub_dir - .join("mcp.json") - .to_string_lossy() - .into_owned(), - scope: Some(PROJECT_SCOPE.to_string()), - content: mcp_config.raw_content.clone(), - encoding: None, - }); - } } } } - if let Some(commands) = context.fast_commands.as_ref() { + if let Some(commands) = context.slash_commands.as_ref() { for project in &project_output_projects { let Some(project_root_dir) = resolve_project_root_dir(workspace, project) else { continue; @@ -327,7 +274,7 @@ fn build_agent_content(agent: &crate::domain::plugin_shared::SubAgentPrompt) -> wrap_yaml_front_matter(&metadata, &agent.content) } -fn build_command_content(command: &crate::domain::plugin_shared::FastCommandPrompt) -> String { +fn build_command_content(command: &crate::domain::plugin_shared::SlashCommandPrompt) -> String { let mut metadata = if let Some(ref yaml_fm) = command.yaml_front_matter { match serde_json::to_value(yaml_fm) { Ok(Value::Object(map)) => map, @@ -484,6 +431,98 @@ mod tests { "Global memory from aindex\n\nWorkspace root prompt from aindex" ); } + + fn make_test_skill(name: &str) -> crate::domain::plugin_shared::SkillPrompt { + use crate::domain::plugin_shared::*; + SkillPrompt { + prompt_type: PromptKind::Skill, + content: "body".to_string(), + length: 4, + skill_name: name.to_string(), + dir: crate::infra::path_types::RelativePath::new(name, "/workspace/aindex/skills"), + yaml_front_matter: Some(SkillYAMLFrontMatter { + description: Some("desc".to_string()), + ..SkillYAMLFrontMatter::default() + }), + child_docs: Some(vec![SkillChildDoc { + prompt_type: PromptKind::SkillChildDoc, + content: "guide".to_string(), + length: 5, + file_path_kind: crate::infra::path_types::FilePathKind::Relative, + relative_path: "guide.mdx".to_string(), + dir: crate::infra::path_types::RelativePath::new("guide.mdx", "/workspace/aindex/skills/test"), + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, + }]), + resources: Some(vec![SkillResource { + prompt_type: PromptKind::SkillResource, + extension: "txt".to_string(), + file_name: "notes.txt".to_string(), + relative_path: "assets/notes.txt".to_string(), + content: "notes".to_string(), + encoding: SkillResourceEncoding::Text, + length: 5, + mime_type: None, + }]), + mcp_config: Some(SkillMcpConfig { + prompt_type: PromptKind::SkillMcpConfig, + mcp_servers: std::collections::HashMap::new(), + raw_content: "{}".to_string(), + }), + markdown_contents: None, + } + } + + #[test] + fn skill_output_only_contains_skill_md() { + use crate::domain::plugin_shared::*; + + let skill = make_test_skill("test-skill"); + let context = OutputContext { + workspace: Some(Workspace { + directory: RootPath::new("/workspace"), + projects: vec![Project { + name: Some("__workspace__".to_string()), + is_workspace_root_project: Some(true), + root_memory_prompt: Some(ProjectRootMemoryPrompt { + prompt_type: PromptKind::ProjectRootMemory, + content: "root".to_string(), + length: 4, + file_path_kind: FilePathKind::Root, + dir: RootPath::new("/workspace"), + yaml_front_matter: None, + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, + }), + ..Project::default() + }], + }), + skills: Some(vec![skill]), + ..OutputContext::default() + }; + + let plan = build_claude_code_output_plan(&context).unwrap(); + let skill_paths: Vec<&str> = plan + .output_files + .iter() + .map(|f| f.path.as_str()) + .filter(|p| p.contains(".claude/skills/test-skill")) + .collect(); + + assert_eq!( + skill_paths.len(), + 1, + "should only have SKILL.md, got: {:?}", + skill_paths + ); + assert!( + skill_paths[0].ends_with("SKILL.md"), + "output should be SKILL.md, got: {}", + skill_paths[0] + ); + } } fn build_cleanup(workspace: &Workspace) -> CleanupDeclarationsDto { diff --git a/sdk/src/domain/output_plans/codex_output_plan.rs b/sdk/src/domain/output_plans/codex_output_plan.rs index 3bd072b0..146a2397 100644 --- a/sdk/src/domain/output_plans/codex_output_plan.rs +++ b/sdk/src/domain/output_plans/codex_output_plan.rs @@ -81,7 +81,7 @@ fn build_output_files( } // Global ~/.codex/prompts/ (from commands) - if let Some(commands) = context.fast_commands.as_ref() { + if let Some(commands) = context.slash_commands.as_ref() { let codex_prompts_dir = resolve_effective_home_dir() .join(CODEX_GLOBAL_CONFIG_DIR) .join(CODEX_PROMPTS_DIR); @@ -135,7 +135,6 @@ fn build_output_files( for skill in skills { let skill_sub_dir = codex_skills_dir.join(&skill.skill_name); - // Main SKILL.md with YAML front matter output_files.push(BaseOutputFileDeclarationDto { path: skill_sub_dir .join("SKILL.md") @@ -145,59 +144,6 @@ fn build_output_files( content: build_skill_content(skill), encoding: None, }); - - // Child docs - if let Some(child_docs) = skill.child_docs.as_ref() { - for child_doc in child_docs { - let child_path = child_doc - .relative_path - .replace(".mdx", ".md") - .replace(".src.md", ".md"); - output_files.push(BaseOutputFileDeclarationDto { - path: skill_sub_dir - .join(&child_path) - .to_string_lossy() - .into_owned(), - scope: Some(PROJECT_SCOPE.to_string()), - content: child_doc.content.clone(), - encoding: None, - }); - } - } - - // Resources - if let Some(resources) = skill.resources.as_ref() { - for resource in resources { - let encoding = match resource.encoding { - crate::domain::plugin_shared::SkillResourceEncoding::Base64 => { - Some("base64".to_string()) - } - crate::domain::plugin_shared::SkillResourceEncoding::Text => None, - }; - output_files.push(BaseOutputFileDeclarationDto { - path: skill_sub_dir - .join(&resource.relative_path) - .to_string_lossy() - .into_owned(), - scope: Some(PROJECT_SCOPE.to_string()), - content: resource.content.clone(), - encoding, - }); - } - } - - // MCP config - if let Some(mcp_config) = skill.mcp_config.as_ref() { - output_files.push(BaseOutputFileDeclarationDto { - path: skill_sub_dir - .join("mcp.json") - .to_string_lossy() - .into_owned(), - scope: Some(PROJECT_SCOPE.to_string()), - content: mcp_config.raw_content.clone(), - encoding: None, - }); - } } } } @@ -253,7 +199,7 @@ fn build_agent_toml_content(agent: &crate::domain::plugin_shared::SubAgentPrompt }) } -fn build_command_content(command: &crate::domain::plugin_shared::FastCommandPrompt) -> String { +fn build_command_content(command: &crate::domain::plugin_shared::SlashCommandPrompt) -> String { let metadata = if let Some(ref yaml_fm) = command.yaml_front_matter { match serde_json::to_value(yaml_fm) { Ok(serde_json::Value::Object(map)) => map, @@ -561,3 +507,99 @@ fn resolve_relative_path(rp: &RelativePath) -> PathBuf { } PathBuf::from(&rp.base_path).join(raw) } + +#[cfg(test)] +mod tests { + use super::*; + use crate::domain::plugin_shared::*; + use crate::infra::path_types::*; + + fn make_test_skill(name: &str) -> SkillPrompt { + SkillPrompt { + prompt_type: PromptKind::Skill, + content: "body".to_string(), + length: 4, + skill_name: name.to_string(), + dir: RelativePath::new(name, "/workspace/aindex/skills"), + yaml_front_matter: Some(SkillYAMLFrontMatter { + description: Some("desc".to_string()), + ..SkillYAMLFrontMatter::default() + }), + child_docs: Some(vec![SkillChildDoc { + prompt_type: PromptKind::SkillChildDoc, + content: "guide".to_string(), + length: 5, + file_path_kind: FilePathKind::Relative, + relative_path: "guide.mdx".to_string(), + dir: RelativePath::new("guide.mdx", "/workspace/aindex/skills/test"), + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, + }]), + resources: Some(vec![SkillResource { + prompt_type: PromptKind::SkillResource, + extension: "txt".to_string(), + file_name: "notes.txt".to_string(), + relative_path: "assets/notes.txt".to_string(), + content: "notes".to_string(), + encoding: SkillResourceEncoding::Text, + length: 5, + mime_type: None, + }]), + mcp_config: Some(SkillMcpConfig { + prompt_type: PromptKind::SkillMcpConfig, + mcp_servers: std::collections::HashMap::new(), + raw_content: "{}".to_string(), + }), + markdown_contents: None, + } + } + + #[test] + fn skill_output_only_contains_skill_md() { + let skill = make_test_skill("test-skill"); + let context = OutputContext { + workspace: Some(Workspace { + directory: RootPath::new("/workspace"), + projects: vec![Project { + name: Some("__workspace__".to_string()), + is_workspace_root_project: Some(true), + root_memory_prompt: Some(ProjectRootMemoryPrompt { + prompt_type: PromptKind::ProjectRootMemory, + content: "root".to_string(), + length: 4, + file_path_kind: FilePathKind::Root, + dir: RootPath::new("/workspace"), + yaml_front_matter: None, + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, + }), + ..Project::default() + }], + }), + skills: Some(vec![skill]), + ..OutputContext::default() + }; + + let plan = build_codex_output_plan(&context).unwrap(); + let skill_paths: Vec<&str> = plan + .output_files + .iter() + .map(|f| f.path.as_str()) + .filter(|p| p.contains(".codex/skills/test-skill")) + .collect(); + + assert_eq!( + skill_paths.len(), + 1, + "should only have SKILL.md, got: {:?}", + skill_paths + ); + assert!( + skill_paths[0].ends_with("SKILL.md"), + "output should be SKILL.md, got: {}", + skill_paths[0] + ); + } +} diff --git a/sdk/src/domain/output_plans/droid_output_plan.rs b/sdk/src/domain/output_plans/droid_output_plan.rs index d7dafc3b..05f7b54e 100644 --- a/sdk/src/domain/output_plans/droid_output_plan.rs +++ b/sdk/src/domain/output_plans/droid_output_plan.rs @@ -9,8 +9,7 @@ use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTa use crate::domain::config; use crate::domain::output_context::OutputContext; use crate::domain::plugin_shared::{ - FastCommandPrompt, Project, RelativePath, RuleScope, SkillPrompt, SkillResourceEncoding, - Workspace, + SlashCommandPrompt, Project, RelativePath, RuleScope, SkillPrompt, Workspace, }; const DROID_PLUGIN_NAME: &str = "DroidCLIOutputAdaptor"; @@ -119,7 +118,7 @@ fn append_command_output_files( workspace: &Workspace, context: &OutputContext, ) -> Result<(), CliError> { - let commands = context.fast_commands.as_deref().unwrap_or(&[]); + let commands = context.slash_commands.as_deref().unwrap_or(&[]); let Some(selected_scope) = select_single_scope(commands.iter().map(resolve_command_scope)) else { return Ok(()); }; @@ -225,27 +224,6 @@ fn append_skill_files_for_scope( Some(scope), build_skill_main_content(skill)?, )); - - if let Some(child_docs) = skill.child_docs.as_ref() { - for child_doc in child_docs { - output_files.push(create_text_output_file( - skill_dir.join(transform_child_doc_path(&child_doc.relative_path)), - Some(scope), - child_doc.content.clone(), - )); - } - } - - if let Some(resources) = skill.resources.as_ref() { - for resource in resources { - output_files.push(create_resource_output_file( - skill_dir.join(&resource.relative_path), - Some(scope), - resource.content.clone(), - resource.encoding, - )); - } - } } Ok(()) @@ -399,25 +377,6 @@ fn create_text_output_file( } } -fn create_resource_output_file( - path: PathBuf, - scope: Option<&str>, - content: String, - encoding: SkillResourceEncoding, -) -> DroidOutputFileDeclarationDto { - let encoding = match encoding { - SkillResourceEncoding::Text => "text", - SkillResourceEncoding::Base64 => "base64", - }; - - DroidOutputFileDeclarationDto { - path: path.to_string_lossy().into_owned(), - scope: scope.map(str::to_string), - content, - encoding: Some(encoding.to_string()), - } -} - fn create_cleanup_target( path: PathBuf, kind: CleanupTargetKindDto, @@ -457,10 +416,10 @@ fn select_single_scope( } fn filter_commands_for_project<'a>( - commands: &'a [FastCommandPrompt], + commands: &'a [SlashCommandPrompt], project_config: Option<&Value>, selected_scope: OutputSelectionScope, -) -> Vec<&'a FastCommandPrompt> { +) -> Vec<&'a SlashCommandPrompt> { let effective_include_series = resolve_effective_include_series(project_config, "commands"); commands @@ -491,7 +450,7 @@ fn filter_skills_for_project<'a>( .collect() } -fn resolve_command_scope(command: &FastCommandPrompt) -> OutputSelectionScope { +fn resolve_command_scope(command: &SlashCommandPrompt) -> OutputSelectionScope { if command.global_only == Some(true) { return OutputSelectionScope::Global; } @@ -586,7 +545,7 @@ fn resolve_skill_extra_value<'a>(skill: &'a SkillPrompt, key: &str) -> Option<&' .and_then(|front_matter| front_matter.extra.get(key)) } -fn transform_command_name(command: &FastCommandPrompt) -> String { +fn transform_command_name(command: &SlashCommandPrompt) -> String { match command.series.as_deref() { Some(series) if !series.is_empty() => format!("{series}-{}.md", command.command_name), _ => format!("{}.md", command.command_name), @@ -601,14 +560,7 @@ fn resolve_skill_dir_name(skill: &SkillPrompt) -> String { skill.dir.get_directory_name() } -fn transform_child_doc_path(relative_path: &str) -> String { - match relative_path.strip_suffix(".mdx") { - Some(prefix) => format!("{prefix}.md"), - None => relative_path.to_string(), - } -} - -fn build_command_content(command: &FastCommandPrompt) -> Result { +fn build_command_content(command: &SlashCommandPrompt) -> Result { let front_matter = command .yaml_front_matter .as_ref() @@ -671,9 +623,9 @@ mod tests { use super::*; use crate::domain::plugin_shared::{ - FastCommandYAMLFrontMatter, FilePathKind, GlobalMemoryPrompt, ProjectChildrenMemoryPrompt, + SlashCommandYAMLFrontMatter, FilePathKind, GlobalMemoryPrompt, ProjectChildrenMemoryPrompt, ProjectRootMemoryPrompt, PromptKind, RootPath, SkillChildDoc, SkillResource, - SkillYAMLFrontMatter, + SkillResourceEncoding, SkillYAMLFrontMatter, }; fn create_relative_path(base_path: &str, path: &str) -> RelativePath { @@ -741,9 +693,9 @@ mod tests { name: &str, series: &str, content: &str, - ) -> FastCommandPrompt { - FastCommandPrompt { - prompt_type: PromptKind::FastCommand, + ) -> SlashCommandPrompt { + SlashCommandPrompt { + prompt_type: PromptKind::SlashCommand, content: content.to_string(), length: content.len(), dir: create_relative_path(project_root, &format!("commands/{name}.mdx")), @@ -751,18 +703,18 @@ mod tests { series: Some(series.to_string()), seri_name: Some(series.to_string()), global_only: None, - yaml_front_matter: Some(FastCommandYAMLFrontMatter { + yaml_front_matter: Some(SlashCommandYAMLFrontMatter { description: Some(format!("{name} description")), - ..FastCommandYAMLFrontMatter::default() + ..SlashCommandYAMLFrontMatter::default() }), raw_mdx_content: None, markdown_contents: None, } } - fn create_global_command(project_root: &str, name: &str, content: &str) -> FastCommandPrompt { - FastCommandPrompt { - prompt_type: PromptKind::FastCommand, + fn create_global_command(project_root: &str, name: &str, content: &str) -> SlashCommandPrompt { + SlashCommandPrompt { + prompt_type: PromptKind::SlashCommand, content: content.to_string(), length: content.len(), dir: create_relative_path(project_root, &format!("commands/{name}.mdx")), @@ -770,10 +722,10 @@ mod tests { series: None, seri_name: None, global_only: Some(true), - yaml_front_matter: Some(FastCommandYAMLFrontMatter { + yaml_front_matter: Some(SlashCommandYAMLFrontMatter { description: Some(format!("{name} description")), scope: Some(RuleScope::Global), - ..FastCommandYAMLFrontMatter::default() + ..SlashCommandYAMLFrontMatter::default() }), raw_mdx_content: None, markdown_contents: None, @@ -901,7 +853,7 @@ mod tests { }, ], }), - fast_commands: Some(vec![ + slash_commands: Some(vec![ create_project_command( &prompt_source_root.to_string_lossy(), "build", @@ -995,22 +947,10 @@ mod tests { .ends_with("project-a/.factory/skills/ship/SKILL.md") }) .unwrap(); - let skill_resource = plan - .output_files - .iter() - .find(|entry| { - entry - .path - .replace('\\', "/") - .ends_with("project-a/.factory/skills/ship/assets/blob.bin") - }) - .unwrap(); - assert_eq!( skill_main.content, "---\nname: ship\ndescription: Skill description\n---\n\nSkill body" ); - assert_eq!(skill_resource.encoding.as_deref(), Some("base64")); assert!( output_paths.contains( &home_dir @@ -1046,7 +986,7 @@ mod tests { create_project(&workspace_dir.to_string_lossy(), "project-a"), ], }), - fast_commands: Some(vec![create_global_command( + slash_commands: Some(vec![create_global_command( &prompt_source_root.to_string_lossy(), "doctor", "Run doctor", @@ -1162,4 +1102,85 @@ mod tests { ); }); } + + #[test] + fn skill_output_only_contains_skill_md() { + let skill = SkillPrompt { + prompt_type: PromptKind::Skill, + content: "Skill body".to_string(), + length: "Skill body".len(), + skill_name: "test-skill".to_string(), + dir: RelativePath::new("test-skill", "/workspace"), + yaml_front_matter: Some(SkillYAMLFrontMatter { + description: Some("Skill description".to_string()), + ..SkillYAMLFrontMatter::default() + }), + mcp_config: None, + child_docs: Some(vec![SkillChildDoc { + prompt_type: PromptKind::SkillChildDoc, + content: "Guide body".to_string(), + length: "Guide body".len(), + file_path_kind: FilePathKind::Relative, + relative_path: "guide.mdx".to_string(), + dir: RelativePath::new("guide.mdx", "/workspace"), + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, + }]), + resources: Some(vec![SkillResource { + prompt_type: PromptKind::SkillResource, + extension: ".bin".to_string(), + file_name: "blob.bin".to_string(), + relative_path: "assets/blob.bin".to_string(), + content: "aGVsbG8=".to_string(), + encoding: SkillResourceEncoding::Base64, + length: 8, + mime_type: None, + }]), + markdown_contents: None, + }; + let context = OutputContext { + workspace: Some(Workspace { + directory: RootPath::new("/workspace"), + projects: vec![Project { + name: Some("__workspace__".to_string()), + is_workspace_root_project: Some(true), + root_memory_prompt: Some(ProjectRootMemoryPrompt { + prompt_type: PromptKind::ProjectRootMemory, + content: "root".to_string(), + length: 4, + file_path_kind: FilePathKind::Root, + dir: RootPath::new("/workspace"), + yaml_front_matter: None, + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, + }), + ..Project::default() + }], + }), + skills: Some(vec![skill]), + ..OutputContext::default() + }; + + let plan = build_droid_output_plan(&context).unwrap(); + let skill_paths: Vec<&str> = plan + .output_files + .iter() + .map(|f| f.path.as_str()) + .filter(|p| p.contains(".factory/skills/test-skill")) + .collect(); + + assert_eq!( + skill_paths.len(), + 1, + "should only have SKILL.md, got: {:?}", + skill_paths + ); + assert!( + skill_paths[0].ends_with("SKILL.md"), + "output should be SKILL.md, got: {}", + skill_paths[0] + ); + } } diff --git a/sdk/src/domain/output_plans/opencode_output_plan.rs b/sdk/src/domain/output_plans/opencode_output_plan.rs index 11f34092..8715fced 100644 --- a/sdk/src/domain/output_plans/opencode_output_plan.rs +++ b/sdk/src/domain/output_plans/opencode_output_plan.rs @@ -142,61 +142,11 @@ fn build_output_files( content: build_skill_content(skill), encoding: None, }); - - if let Some(child_docs) = skill.child_docs.as_ref() { - for child_doc in child_docs { - let child_path = child_doc - .relative_path - .replace(".mdx", ".md") - .replace(".src.md", ".md"); - output_files.push(BaseOutputFileDeclarationDto { - path: skill_sub_dir - .join(&child_path) - .to_string_lossy() - .into_owned(), - scope: Some(PROJECT_SCOPE.to_string()), - content: child_doc.content.clone(), - encoding: None, - }); - } - } - - if let Some(resources) = skill.resources.as_ref() { - for resource in resources { - let encoding = match resource.encoding { - crate::domain::plugin_shared::SkillResourceEncoding::Base64 => { - Some("base64".to_string()) - } - crate::domain::plugin_shared::SkillResourceEncoding::Text => None, - }; - output_files.push(BaseOutputFileDeclarationDto { - path: skill_sub_dir - .join(&resource.relative_path) - .to_string_lossy() - .into_owned(), - scope: Some(PROJECT_SCOPE.to_string()), - content: resource.content.clone(), - encoding, - }); - } - } - - if let Some(mcp_config) = skill.mcp_config.as_ref() { - output_files.push(BaseOutputFileDeclarationDto { - path: skill_sub_dir - .join("mcp.json") - .to_string_lossy() - .into_owned(), - scope: Some(PROJECT_SCOPE.to_string()), - content: mcp_config.raw_content.clone(), - encoding: None, - }); - } } } } - if let Some(commands) = context.fast_commands.as_ref() { + if let Some(commands) = context.slash_commands.as_ref() { for project in &project_output_projects { let Some(project_root_dir) = resolve_project_root_dir(workspace, project) else { continue; @@ -297,7 +247,7 @@ fn build_agent_content(agent: &crate::domain::plugin_shared::SubAgentPrompt) -> wrap_yaml_front_matter(&metadata, &agent.content) } -fn build_command_content(command: &crate::domain::plugin_shared::FastCommandPrompt) -> String { +fn build_command_content(command: &crate::domain::plugin_shared::SlashCommandPrompt) -> String { let mut metadata = if let Some(ref yaml_fm) = command.yaml_front_matter { match serde_json::to_value(yaml_fm) { Ok(Value::Object(map)) => map, @@ -743,4 +693,96 @@ mod tests { assert_eq!(css_color_name_to_hex("darkgray"), Some("#A9A9A9")); assert_eq!(css_color_name_to_hex("darkgrey"), Some("#A9A9A9")); } + + fn make_test_skill(name: &str) -> crate::domain::plugin_shared::SkillPrompt { + use crate::domain::plugin_shared::*; + SkillPrompt { + prompt_type: PromptKind::Skill, + content: "body".to_string(), + length: 4, + skill_name: name.to_string(), + dir: crate::infra::path_types::RelativePath::new(name, "/workspace/aindex/skills"), + yaml_front_matter: Some(SkillYAMLFrontMatter { + description: Some("desc".to_string()), + ..SkillYAMLFrontMatter::default() + }), + child_docs: Some(vec![SkillChildDoc { + prompt_type: PromptKind::SkillChildDoc, + content: "guide".to_string(), + length: 5, + file_path_kind: crate::infra::path_types::FilePathKind::Relative, + relative_path: "guide.mdx".to_string(), + dir: crate::infra::path_types::RelativePath::new("guide.mdx", "/workspace/aindex/skills/test"), + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, + }]), + resources: Some(vec![SkillResource { + prompt_type: PromptKind::SkillResource, + extension: "txt".to_string(), + file_name: "notes.txt".to_string(), + relative_path: "assets/notes.txt".to_string(), + content: "notes".to_string(), + encoding: SkillResourceEncoding::Text, + length: 5, + mime_type: None, + }]), + mcp_config: Some(SkillMcpConfig { + prompt_type: PromptKind::SkillMcpConfig, + mcp_servers: std::collections::HashMap::new(), + raw_content: "{}".to_string(), + }), + markdown_contents: None, + } + } + + #[test] + fn skill_output_only_contains_skill_md() { + use crate::domain::plugin_shared::*; + + let skill = make_test_skill("test-skill"); + let context = OutputContext { + workspace: Some(Workspace { + directory: RootPath::new("/workspace"), + projects: vec![Project { + name: Some("__workspace__".to_string()), + is_workspace_root_project: Some(true), + root_memory_prompt: Some(ProjectRootMemoryPrompt { + prompt_type: PromptKind::ProjectRootMemory, + content: "root".to_string(), + length: 4, + file_path_kind: FilePathKind::Root, + dir: RootPath::new("/workspace"), + yaml_front_matter: None, + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, + }), + ..Project::default() + }], + }), + skills: Some(vec![skill]), + ..OutputContext::default() + }; + + let plan = build_opencode_output_plan(&context).unwrap(); + let skill_paths: Vec<&str> = plan + .output_files + .iter() + .map(|f| f.path.as_str()) + .filter(|p| p.contains(".opencode/skills/test-skill")) + .collect(); + + assert_eq!( + skill_paths.len(), + 1, + "should only have SKILL.md, got: {:?}", + skill_paths + ); + assert!( + skill_paths[0].ends_with("SKILL.md"), + "output should be SKILL.md, got: {}", + skill_paths[0] + ); + } } diff --git a/sdk/src/domain/plugin_shared.rs b/sdk/src/domain/plugin_shared.rs index 1bec2ab5..431909f8 100644 --- a/sdk/src/domain/plugin_shared.rs +++ b/sdk/src/domain/plugin_shared.rs @@ -24,7 +24,7 @@ pub enum PromptKind { GlobalMemory, ProjectRootMemory, ProjectChildrenMemory, - FastCommand, + SlashCommand, SubAgent, Skill, SkillChildDoc, @@ -138,7 +138,7 @@ pub struct RuleYAMLFrontMatter { #[derive(Debug, Clone, Default, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] -pub struct FastCommandYAMLFrontMatter { +pub struct SlashCommandYAMLFrontMatter { #[serde(default, skip_serializing_if = "Option::is_none")] pub description: Option, #[serde(default, skip_serializing_if = "Option::is_none")] @@ -230,10 +230,10 @@ pub struct RulePrompt { pub markdown_contents: Option>, } -/// Fast command prompt. +/// Slash command prompt. #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] -pub struct FastCommandPrompt { +pub struct SlashCommandPrompt { #[serde(rename = "type")] pub prompt_type: PromptKind, pub content: String, @@ -251,7 +251,7 @@ pub struct FastCommandPrompt { #[serde(default, skip_serializing_if = "Option::is_none")] pub global_only: Option, #[serde(default, skip_serializing_if = "Option::is_none")] - pub yaml_front_matter: Option, + pub yaml_front_matter: Option, #[serde(default, skip_serializing_if = "Option::is_none")] pub raw_mdx_content: Option, #[serde(default, skip_serializing_if = "Option::is_none")] @@ -571,8 +571,8 @@ mod tests { #[test] fn test_enums_serialize() { assert_eq!( - serde_json::to_string(&PromptKind::FastCommand).unwrap(), - "\"FastCommand\"" + serde_json::to_string(&PromptKind::SlashCommand).unwrap(), + "\"SlashCommand\"" ); assert_eq!( serde_json::to_string(&RuleScope::Global).unwrap(), diff --git a/sdk/src/repositories/command.rs b/sdk/src/repositories/command.rs index 978e4c51..318e3e03 100644 --- a/sdk/src/repositories/command.rs +++ b/sdk/src/repositories/command.rs @@ -3,7 +3,7 @@ use serde_json::Value; use crate::domain::config; use crate::domain::plugin_shared::{ - FastCommandPrompt, FastCommandYAMLFrontMatter, PromptKind, RelativePath, + SlashCommandPrompt, SlashCommandYAMLFrontMatter, PromptKind, RelativePath, }; use crate::repositories::localized_reader::read_flat_files; @@ -47,7 +47,7 @@ fn validate_command_metadata( fn build_command_prompt( entry: &crate::repositories::localized_reader::FlatFileEntry, dir: &str, -) -> Result { +) -> Result { let compiled = entry .compiled .as_ref() @@ -98,7 +98,7 @@ fn build_command_prompt( None } else { Some( - serde_json::from_value::(Value::Object( + serde_json::from_value::(Value::Object( compiled.metadata.clone(), )) .map_err(|e| crate::CliError::ConfigError(e.to_string()))?, @@ -108,8 +108,8 @@ fn build_command_prompt( let content = compiled.content.clone(); let length = content.len(); - Ok(FastCommandPrompt { - prompt_type: PromptKind::FastCommand, + Ok(SlashCommandPrompt { + prompt_type: PromptKind::SlashCommand, content, length, dir: RelativePath::new(&format!("{}.mdx", entry.name), dir), @@ -137,7 +137,7 @@ pub fn collect_command(options_json: &str) -> Result { let entries = read_flat_files(&dir_str, global_scope_json.as_deref())?; - let mut prompts: Vec = Vec::new(); + let mut prompts: Vec = Vec::new(); for entry in &entries { if entry.compiled.is_none() && (entry.src_zh.is_some() || entry.src_en.is_some()) { return Err(crate::CliError::ConfigError(format!( @@ -153,7 +153,7 @@ pub fn collect_command(options_json: &str) -> Result { #[derive(Debug, Clone, serde::Serialize)] #[serde(rename_all = "camelCase")] struct CommandResult { - commands: Vec, + commands: Vec, } let result = CommandResult { commands: prompts }; diff --git a/sdk/src/services/common.rs b/sdk/src/services/common.rs index 4ec68f07..2dabd586 100644 --- a/sdk/src/services/common.rs +++ b/sdk/src/services/common.rs @@ -147,7 +147,7 @@ pub struct GlobalMemoryEnvelope { #[serde(rename_all = "camelCase")] pub struct CommandsEnvelope { #[serde(default)] - pub commands: Vec, + pub commands: Vec, } #[derive(Debug, Default, serde::Deserialize)] @@ -505,7 +505,7 @@ pub fn collect_context( zed_config_files: zed.zed_config_files, jetbrains_config_files: jetbrains.jetbrains_config_files, editor_config_files: editor_config.editor_config_files, - fast_commands: (!commands.commands.is_empty()).then_some(commands.commands), + slash_commands: (!commands.commands.is_empty()).then_some(commands.commands), sub_agents: (!sub_agents.sub_agents.is_empty()).then_some(sub_agents.sub_agents), skills: (!skills.skills.is_empty()).then_some(skills.skills), rules: (!rules.rules.is_empty()).then_some(rules.rules), diff --git a/sdk/src/services/dry_run_service.rs b/sdk/src/services/dry_run_service.rs index 62043b37..39ac29fd 100644 --- a/sdk/src/services/dry_run_service.rs +++ b/sdk/src/services/dry_run_service.rs @@ -79,7 +79,7 @@ pub fn dry_run(options: MemorySyncCommandOptions) -> Result Date: Thu, 30 Apr 2026 03:52:11 +0800 Subject: [PATCH 07/45] =?UTF-8?q?refactor:=20apply=20code=20review=20fixes?= =?UTF-8?q?=20(7/10=20=E2=86=92=20hardened)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 1. Lock down embedded TS runtime: env allowlist, hide untrusted env by default 2. Path protection: install-time protected-path guard for file writes 3. Re-enable Clippy in xtask lint and CI 4. Replace unwrap/expect in GUI/MCP startup paths with proper error handling 5. Narrow tnmsd public exports with pub(crate) where possible 6. Add rustdoc to exported SDK types 7. Strengthen tests: malicious paths, env isolation, install protection 37 files changed, +515/-300 --- cli/local-tests/src/lib.rs | 26 +-- cli/local-tests/tests/clean_blackbox.rs | 24 +-- .../tests/opencode_agent_mode_validation.rs | 15 +- cli/local-tests/tests/rules_source_smoke.rs | 8 +- cli/src/commands/package.rs | 6 +- gui/src-tauri/src/lib.rs | 7 +- gui/src-tauri/src/tray.rs | 12 +- mcp/src/commands/package.rs | 6 +- mcp/src/main.rs | 2 +- sdk/src/domain/base_output_plans.rs | 6 +- sdk/src/domain/config/mod.rs | 18 +-- .../output_plans/claude_code_output_plan.rs | 7 +- .../domain/output_plans/codex_output_plan.rs | 5 +- .../domain/output_plans/kiro_output_plan.rs | 9 +- .../output_plans/opencode_output_plan.rs | 6 +- .../domain/output_plans/warp_output_plan.rs | 37 +++-- sdk/src/infra/deno_runtime.rs | 96 +++++++++++- sdk/src/infra/desk_paths.rs | 10 +- sdk/src/infra/md_compiler/expression_eval.rs | 2 +- sdk/src/infra/md_compiler/mdx_to_md.rs | 2 +- sdk/src/infra/script_runtime.rs | 30 ++-- sdk/src/policy/cleanup.rs | 8 + sdk/src/policy/md_cleanup.rs | 24 +-- sdk/src/repositories/aindex_resolvers.rs | 2 +- sdk/src/repositories/command.rs | 7 +- sdk/src/repositories/global_memory.rs | 2 +- sdk/src/repositories/localized_reader.rs | 41 ++--- sdk/src/repositories/project_prompt.rs | 26 +-- sdk/src/repositories/prompt_artifact.rs | 2 +- sdk/src/repositories/readme.rs | 2 +- sdk/src/repositories/rule.rs | 2 +- sdk/src/repositories/shared_ignore.rs | 28 ++-- sdk/src/repositories/skill.rs | 124 +++++++-------- sdk/src/services/clean_service.rs | 47 +++--- sdk/src/services/dry_run_service.rs | 10 +- sdk/src/services/install_service.rs | 148 +++++++++++++++++- xtask/src/main.rs | 8 +- 37 files changed, 515 insertions(+), 300 deletions(-) diff --git a/cli/local-tests/src/lib.rs b/cli/local-tests/src/lib.rs index 41eea452..24e8b424 100644 --- a/cli/local-tests/src/lib.rs +++ b/cli/local-tests/src/lib.rs @@ -104,8 +104,8 @@ impl LocalTestRunner { let parsed: serde_json::Value = serde_json::from_str(&raw).ok()?; let ws_dir = parsed.get("workspaceDir")?.as_str()?; // 展开 ~/ 为 home_dir - let expanded = if ws_dir.starts_with("~/") { - home_dir().join(&ws_dir[2..]) + let expanded = if let Some(stripped) = ws_dir.strip_prefix("~/") { + home_dir().join(stripped) } else { PathBuf::from(ws_dir) }; @@ -373,6 +373,12 @@ impl LocalTestRunner { } } +impl Default for LocalTestRunner { + fn default() -> Self { + Self::new() + } +} + // --------------------------------------------------------------------------- // Cross-process file lock — prevents test binaries from interfering with each // other when running local tests on the shared project directory. @@ -395,15 +401,13 @@ fn acquire_cross_process_lock() -> CrossProcessLock { Ok(_) => return CrossProcessLock(Some(lock_path)), Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => { // Stale-lock detection: if older than 5 minutes, remove and retry - if let Ok(meta) = std::fs::metadata(&lock_path) { - if let Ok(created) = meta.created() { - if let Ok(elapsed) = created.elapsed() { - if elapsed > Duration::from_secs(300) { - let _ = std::fs::remove_file(&lock_path); - continue; - } - } - } + if let Ok(meta) = std::fs::metadata(&lock_path) + && let Ok(created) = meta.created() + && let Ok(elapsed) = created.elapsed() + && elapsed > Duration::from_secs(300) + { + let _ = std::fs::remove_file(&lock_path); + continue; } std::thread::sleep(Duration::from_millis(200)); } diff --git a/cli/local-tests/tests/clean_blackbox.rs b/cli/local-tests/tests/clean_blackbox.rs index 78c814a5..e6da7903 100644 --- a/cli/local-tests/tests/clean_blackbox.rs +++ b/cli/local-tests/tests/clean_blackbox.rs @@ -94,11 +94,11 @@ fn local_clean_from_memory_sync_does_not_clean_other_projects() { "memory-sync/AGENTS.md should exist after install" ); assert!( - runner.file_exists_at(&knowladge.join("AGENTS.md")), + runner.file_exists_at(knowladge.join("AGENTS.md")), "knowladge/AGENTS.md should exist after install" ); assert!( - runner.file_exists_at(&aindex.join("AGENTS.md")), + runner.file_exists_at(aindex.join("AGENTS.md")), "aindex/AGENTS.md should exist after manual create" ); @@ -115,11 +115,11 @@ fn local_clean_from_memory_sync_does_not_clean_other_projects() { // 其他项目的 AGENTS.md 应该保留 assert!( - runner.file_exists_at(&knowladge.join("AGENTS.md")), + runner.file_exists_at(knowladge.join("AGENTS.md")), "knowladge/AGENTS.md should still exist after scoped clean" ); assert!( - runner.file_exists_at(&aindex.join("AGENTS.md")), + runner.file_exists_at(aindex.join("AGENTS.md")), "aindex/AGENTS.md should still exist after scoped clean" ); } @@ -150,11 +150,11 @@ fn local_clean_from_aindex_does_not_clean_memory_sync() { "memory-sync/AGENTS.md should exist after install" ); assert!( - runner.file_exists_at(&knowladge.join("AGENTS.md")), + runner.file_exists_at(knowladge.join("AGENTS.md")), "knowladge/AGENTS.md should exist after install" ); assert!( - runner.file_exists_at(&aindex.join("AGENTS.md")), + runner.file_exists_at(aindex.join("AGENTS.md")), "aindex/AGENTS.md should exist after manual create" ); @@ -165,7 +165,7 @@ fn local_clean_from_aindex_does_not_clean_memory_sync() { // aindex 的 AGENTS.md 应该被清理(在作用域内) assert!( - !runner.file_exists_at(&aindex.join("AGENTS.md")), + !runner.file_exists_at(aindex.join("AGENTS.md")), "aindex/AGENTS.md should be removed after scoped clean" ); @@ -175,7 +175,7 @@ fn local_clean_from_aindex_does_not_clean_memory_sync() { "memory-sync/AGENTS.md should still exist after scoped clean from aindex" ); assert!( - runner.file_exists_at(&knowladge.join("AGENTS.md")), + runner.file_exists_at(knowladge.join("AGENTS.md")), "knowladge/AGENTS.md should still exist after scoped clean from aindex" ); } @@ -206,11 +206,11 @@ fn local_clean_from_home_cleans_all_projects() { "memory-sync/AGENTS.md should exist after install" ); assert!( - runner.file_exists_at(&knowladge.join("AGENTS.md")), + runner.file_exists_at(knowladge.join("AGENTS.md")), "knowladge/AGENTS.md should exist after install" ); assert!( - runner.file_exists_at(&aindex.join("AGENTS.md")), + runner.file_exists_at(aindex.join("AGENTS.md")), "aindex/AGENTS.md should exist after manual create" ); @@ -225,11 +225,11 @@ fn local_clean_from_home_cleans_all_projects() { "memory-sync/AGENTS.md should be removed after global clean" ); assert!( - !runner.file_exists_at(&knowladge.join("AGENTS.md")), + !runner.file_exists_at(knowladge.join("AGENTS.md")), "knowladge/AGENTS.md should be removed after global clean" ); assert!( - !runner.file_exists_at(&aindex.join("AGENTS.md")), + !runner.file_exists_at(aindex.join("AGENTS.md")), "aindex/AGENTS.md should be removed after global clean" ); } diff --git a/cli/local-tests/tests/opencode_agent_mode_validation.rs b/cli/local-tests/tests/opencode_agent_mode_validation.rs index 2c70db34..b0aa535f 100644 --- a/cli/local-tests/tests/opencode_agent_mode_validation.rs +++ b/cli/local-tests/tests/opencode_agent_mode_validation.rs @@ -33,9 +33,10 @@ fn extract_mode_from_front_matter_line(line: &str) -> Option { return None; } // 去除引号 - let value = if after_key.starts_with('"') && after_key.ends_with('"') && after_key.len() >= 2 { - &after_key[1..after_key.len() - 1] - } else if after_key.starts_with('\'') && after_key.ends_with('\'') && after_key.len() >= 2 { + let value = if after_key.len() >= 2 + && ((after_key.starts_with('"') && after_key.ends_with('"')) + || (after_key.starts_with('\'') && after_key.ends_with('\''))) + { &after_key[1..after_key.len() - 1] } else { after_key @@ -61,10 +62,10 @@ fn extract_mode_from_agent_file(content: &str) -> Option { break; } } - if in_front_matter { - if let Some(mode) = extract_mode_from_front_matter_line(line) { - return Some(mode); - } + if in_front_matter + && let Some(mode) = extract_mode_from_front_matter_line(line) + { + return Some(mode); } } None diff --git a/cli/local-tests/tests/rules_source_smoke.rs b/cli/local-tests/tests/rules_source_smoke.rs index 3b85c81f..93438dc8 100644 --- a/cli/local-tests/tests/rules_source_smoke.rs +++ b/cli/local-tests/tests/rules_source_smoke.rs @@ -107,10 +107,10 @@ fn collect_src_mdx_files(dir: &Path) -> Vec { }; if ft.is_dir() { files.extend(collect_src_mdx_files(&path)); - } else if let Some(name) = path.file_name().and_then(|n| n.to_str()) { - if name.ends_with(".src.mdx") { - files.push(path); - } + } else if let Some(name) = path.file_name().and_then(|n| n.to_str()) + && name.ends_with(".src.mdx") + { + files.push(path); } } files diff --git a/cli/src/commands/package.rs b/cli/src/commands/package.rs index d531c724..36fab300 100644 --- a/cli/src/commands/package.rs +++ b/cli/src/commands/package.rs @@ -186,7 +186,7 @@ fn find_target(suffix: &str) -> &'static PackageTarget { PACKAGE_TARGETS .iter() .find(|target| target.suffix == suffix) - .expect("package target mapping must stay in sync") + .unwrap_or_else(|| unreachable!("package target mapping must stay in sync")) } fn package_root() -> PathBuf { @@ -202,8 +202,8 @@ fn workspace_root() -> PathBuf { PathBuf::from(env!("CARGO_MANIFEST_DIR")) .parent() - .expect("cli crate should always live under the workspace root") - .to_path_buf() + .map(Path::to_path_buf) + .unwrap_or_else(|| PathBuf::from(env!("CARGO_MANIFEST_DIR"))) } #[cfg(unix)] diff --git a/gui/src-tauri/src/lib.rs b/gui/src-tauri/src/lib.rs index 07088fe5..9b662cc6 100644 --- a/gui/src-tauri/src/lib.rs +++ b/gui/src-tauri/src/lib.rs @@ -26,7 +26,9 @@ pub fn run() { .setup(|app| { tray::create_tray(app)?; - let window = app.get_webview_window("main").unwrap(); + let window = app + .get_webview_window("main") + .ok_or_else(|| tauri::Error::WindowNotFound)?; let window_clone = window.clone(); window.on_window_event(move |event| { if let tauri::WindowEvent::CloseRequested { api, .. } = event { @@ -38,5 +40,6 @@ pub fn run() { Ok(()) }) .run(tauri::generate_context!()) - .expect("error while running tauri application"); + .map_err(|error| eprintln!("error while running tauri application: {error}")) + .ok(); } diff --git a/gui/src-tauri/src/tray.rs b/gui/src-tauri/src/tray.rs index c729bbd9..8c9267c4 100644 --- a/gui/src-tauri/src/tray.rs +++ b/gui/src-tauri/src/tray.rs @@ -41,7 +41,17 @@ pub fn create_tray(app: &tauri::App) -> Result { // ── Build the tray icon ───────────────────────────────────────────── TrayIconBuilder::new() - .icon(app.default_window_icon().unwrap().clone()) + .icon( + app + .default_window_icon() + .ok_or_else(|| { + tauri::Error::InvalidIcon(std::io::Error::new( + std::io::ErrorKind::NotFound, + "default window icon is not configured", + )) + })? + .clone(), + ) .menu(&menu) // Handle context-menu item clicks. .on_menu_event(|app, event| { diff --git a/mcp/src/commands/package.rs b/mcp/src/commands/package.rs index b01c5335..50bea94d 100644 --- a/mcp/src/commands/package.rs +++ b/mcp/src/commands/package.rs @@ -147,7 +147,7 @@ fn find_target(suffix: &str) -> &'static PackageTarget { PACKAGE_TARGETS .iter() .find(|target| target.suffix == suffix) - .expect("package target mapping must stay in sync") + .unwrap_or_else(|| unreachable!("package target mapping must stay in sync")) } fn package_root() -> PathBuf { @@ -163,8 +163,8 @@ fn workspace_root() -> PathBuf { PathBuf::from(env!("CARGO_MANIFEST_DIR")) .parent() - .expect("mcp crate should always live under the workspace root") - .to_path_buf() + .map(Path::to_path_buf) + .unwrap_or_else(|| PathBuf::from(env!("CARGO_MANIFEST_DIR"))) } #[cfg(unix)] diff --git a/mcp/src/main.rs b/mcp/src/main.rs index 8cc7d8cc..10d5bd4f 100644 --- a/mcp/src/main.rs +++ b/mcp/src/main.rs @@ -279,7 +279,7 @@ fn run_stdio_server() { } }; - let is_notification = !request.as_object().map_or(false, |m| m.contains_key("id")); + let is_notification = !request.as_object().is_some_and(|m| m.contains_key("id")); if is_notification { continue; } diff --git a/sdk/src/domain/base_output_plans.rs b/sdk/src/domain/base_output_plans.rs index 2e708eef..6782254c 100644 --- a/sdk/src/domain/base_output_plans.rs +++ b/sdk/src/domain/base_output_plans.rs @@ -378,9 +378,7 @@ fn resolve_project_root_dir(workspace: &Workspace, project: &Project) -> Option< fn resolve_relative_path(relative_path: &RelativePath) -> PathBuf { let raw_path = Path::new(&relative_path.path); - let candidate = if raw_path.is_absolute() { - raw_path.to_path_buf() - } else if relative_path.base_path.is_empty() { + let candidate = if raw_path.is_absolute() || relative_path.base_path.is_empty() { raw_path.to_path_buf() } else { PathBuf::from(&relative_path.base_path).join(raw_path) @@ -532,7 +530,7 @@ fn sanitize_git_exclude_content(content: &str) -> String { if trimmed.is_empty() { return true; } - !(trimmed.starts_with('#') && !trimmed.starts_with("\\#")) + !trimmed.starts_with('#') || trimmed.starts_with("\\#") }) .collect::>() .join("\n"); diff --git a/sdk/src/domain/config/mod.rs b/sdk/src/domain/config/mod.rs index 269ee8d8..d956cd71 100644 --- a/sdk/src/domain/config/mod.rs +++ b/sdk/src/domain/config/mod.rs @@ -192,7 +192,7 @@ pub struct PluginsConfig { /// User configuration file (.tnmsc.json). /// All fields are optional — missing fields use default values. -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, JsonSchema)] +#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, JsonSchema)] #[serde(rename_all = "camelCase")] pub struct UserConfigFile { #[serde(default, skip_serializing_if = "Option::is_none")] @@ -211,20 +211,6 @@ pub struct UserConfigFile { pub plugins: Option, } -impl Default for UserConfigFile { - fn default() -> Self { - Self { - version: None, - workspace_dir: None, - log_level: None, - profile: None, - code_styles: None, - windows: None, - plugins: None, - } - } -} - // --------------------------------------------------------------------------- // Result types // --------------------------------------------------------------------------- @@ -270,7 +256,7 @@ pub struct RuntimeEnvironmentContext { fn home_dir() -> Option { std::env::var_os("HOME") .map(PathBuf::from) - .or_else(|| dirs::home_dir()) + .or_else(dirs::home_dir) } fn normalize_posix_like_path(raw_path: &str) -> String { diff --git a/sdk/src/domain/output_plans/claude_code_output_plan.rs b/sdk/src/domain/output_plans/claude_code_output_plan.rs index 45d5f6ea..f4855fad 100644 --- a/sdk/src/domain/output_plans/claude_code_output_plan.rs +++ b/sdk/src/domain/output_plans/claude_code_output_plan.rs @@ -264,7 +264,7 @@ fn build_agent_content(agent: &crate::domain::plugin_shared::SubAgentPrompt) -> // Filter out empty arrays and null values metadata.retain(|_, v| { - !v.is_null() && !(v.is_array() && v.as_array().map(|a| a.is_empty()).unwrap_or(false)) + !(v.is_null() || v.is_array() && v.as_array().map(|a| a.is_empty()).unwrap_or(false)) }); if metadata.is_empty() { @@ -294,7 +294,7 @@ fn build_command_content(command: &crate::domain::plugin_shared::SlashCommandPro // Filter out empty arrays and null values metadata.retain(|_, v| { - !v.is_null() && !(v.is_array() && v.as_array().map(|a| a.is_empty()).unwrap_or(false)) + !(v.is_null() || v.is_array() && v.as_array().map(|a| a.is_empty()).unwrap_or(false)) }); if metadata.is_empty() { @@ -322,7 +322,7 @@ fn build_skill_content(skill: &crate::domain::plugin_shared::SkillPrompt) -> Str // Filter out empty arrays and null values metadata.retain(|_, v| { - !v.is_null() && !(v.is_array() && v.as_array().map(|a| a.is_empty()).unwrap_or(false)) + !(v.is_null() || v.is_array() && v.as_array().map(|a| a.is_empty()).unwrap_or(false)) }); if metadata.is_empty() { @@ -366,6 +366,7 @@ fn indent_yaml_list_items(yaml: &str) -> String { } #[cfg(test)] +#[allow(clippy::items_after_test_module)] mod tests { use super::*; use serde_json::json; diff --git a/sdk/src/domain/output_plans/codex_output_plan.rs b/sdk/src/domain/output_plans/codex_output_plan.rs index 146a2397..f3169eda 100644 --- a/sdk/src/domain/output_plans/codex_output_plan.rs +++ b/sdk/src/domain/output_plans/codex_output_plan.rs @@ -224,8 +224,7 @@ fn build_command_content(command: &crate::domain::plugin_shared::SlashCommandPro .into_iter() .filter(|(k, v)| { // Codex only supports description and argument-hint - !v.is_null() - && !(v.is_array() && v.as_array().map(|a| a.is_empty()).unwrap_or(false)) + !(v.is_null() || v.is_array() && v.as_array().map(|a| a.is_empty()).unwrap_or(false)) && (k == "description" || k == "argument-hint") }) .collect(); @@ -255,7 +254,7 @@ fn build_skill_content(skill: &crate::domain::plugin_shared::SkillPrompt) -> Str // Filter out empty arrays and null values metadata.retain(|_, v| { - !v.is_null() && !(v.is_array() && v.as_array().map(|a| a.is_empty()).unwrap_or(false)) + !(v.is_null() || v.is_array() && v.as_array().map(|a| a.is_empty()).unwrap_or(false)) }); if metadata.is_empty() { diff --git a/sdk/src/domain/output_plans/kiro_output_plan.rs b/sdk/src/domain/output_plans/kiro_output_plan.rs index 3fc88d26..16081603 100644 --- a/sdk/src/domain/output_plans/kiro_output_plan.rs +++ b/sdk/src/domain/output_plans/kiro_output_plan.rs @@ -187,14 +187,9 @@ mod tests { .map(|d| d.path.as_str()) .collect(); + assert!(paths.contains(&"/tmp/workspace/.kiro/streening"), "expected workspace root glob"); assert!( - paths.iter().any(|p| *p == "/tmp/workspace/.kiro/streening"), - "expected workspace root glob" - ); - assert!( - paths - .iter() - .any(|p| *p == "/tmp/workspace/project-a/.kiro/specs/**/*"), + paths.contains(&"/tmp/workspace/project-a/.kiro/specs/**/*"), "expected project glob" ); assert!( diff --git a/sdk/src/domain/output_plans/opencode_output_plan.rs b/sdk/src/domain/output_plans/opencode_output_plan.rs index 8715fced..1d2264d7 100644 --- a/sdk/src/domain/output_plans/opencode_output_plan.rs +++ b/sdk/src/domain/output_plans/opencode_output_plan.rs @@ -237,7 +237,7 @@ fn build_agent_content(agent: &crate::domain::plugin_shared::SubAgentPrompt) -> metadata.remove("model"); metadata.retain(|_, v| { - !v.is_null() && !(v.is_array() && v.as_array().map(|a| a.is_empty()).unwrap_or(false)) + !(v.is_null() || v.is_array() && v.as_array().map(|a| a.is_empty()).unwrap_or(false)) }); if metadata.is_empty() { @@ -265,7 +265,7 @@ fn build_command_content(command: &crate::domain::plugin_shared::SlashCommandPro metadata.insert("command".to_string(), Value::String(command_source)); metadata.retain(|_, v| { - !v.is_null() && !(v.is_array() && v.as_array().map(|a| a.is_empty()).unwrap_or(false)) + !(v.is_null() || v.is_array() && v.as_array().map(|a| a.is_empty()).unwrap_or(false)) }); if metadata.is_empty() { @@ -291,7 +291,7 @@ fn build_skill_content(skill: &crate::domain::plugin_shared::SkillPrompt) -> Str ); metadata.retain(|_, v| { - !v.is_null() && !(v.is_array() && v.as_array().map(|a| a.is_empty()).unwrap_or(false)) + !(v.is_null() || v.is_array() && v.as_array().map(|a| a.is_empty()).unwrap_or(false)) }); if metadata.is_empty() { diff --git a/sdk/src/domain/output_plans/warp_output_plan.rs b/sdk/src/domain/output_plans/warp_output_plan.rs index 4a9d9294..d9f66d11 100644 --- a/sdk/src/domain/output_plans/warp_output_plan.rs +++ b/sdk/src/domain/output_plans/warp_output_plan.rs @@ -101,28 +101,27 @@ fn build_output_files( } } - if let Some(ignore_config_files) = context.ai_agent_ignore_config_files.as_ref() { - if let Some(ignore_file) = ignore_config_files + if let Some(ignore_config_files) = context.ai_agent_ignore_config_files.as_ref() + && let Some(ignore_file) = ignore_config_files .iter() .find(|file| file.file_name == WARP_IGNORE_FILE) - { - for project in get_concrete_projects(workspace) { - let Some(project_root_dir) = resolve_project_root_dir(workspace, project) else { - continue; - }; - if project.is_prompt_source_project == Some(true) { - continue; - } - output_files.push(BaseOutputFileDeclarationDto { - path: project_root_dir - .join(WARP_IGNORE_FILE) - .to_string_lossy() - .into_owned(), - scope: Some(PROJECT_SCOPE.to_string()), - content: ignore_file.content.clone(), - encoding: None, - }); + { + for project in get_concrete_projects(workspace) { + let Some(project_root_dir) = resolve_project_root_dir(workspace, project) else { + continue; + }; + if project.is_prompt_source_project == Some(true) { + continue; } + output_files.push(BaseOutputFileDeclarationDto { + path: project_root_dir + .join(WARP_IGNORE_FILE) + .to_string_lossy() + .into_owned(), + scope: Some(PROJECT_SCOPE.to_string()), + content: ignore_file.content.clone(), + encoding: None, + }); } } diff --git a/sdk/src/infra/deno_runtime.rs b/sdk/src/infra/deno_runtime.rs index a0d5461f..f298a154 100644 --- a/sdk/src/infra/deno_runtime.rs +++ b/sdk/src/infra/deno_runtime.rs @@ -5,7 +5,7 @@ use std::borrow::Cow; use std::cell::RefCell; -use std::collections::HashMap; +use std::collections::{BTreeMap, HashMap}; use std::path::Path; use std::rc::Rc; @@ -73,7 +73,7 @@ impl DenoRuntime { let parsed_context: serde_json::Value = serde_json::from_str(context_json) .map_err(|error| format!("Invalid runtime context JSON: {error}"))?; - let env_map: HashMap = std::env::vars().collect(); + let env_map = allowed_environment(&parsed_context); let bootstrap = format!( r#" const __TNMS_CONTEXT_JSON = {context_json}; @@ -235,10 +235,24 @@ globalThis.Deno = {{ impl Default for DenoRuntime { fn default() -> Self { - Self::new().expect("Failed to initialize DenoRuntime") + Self } } +fn allowed_environment(context: &serde_json::Value) -> BTreeMap { + let allowlist = context + .get("allowedEnv") + .or_else(|| context.get("allowedEnvVars")) + .and_then(serde_json::Value::as_array); + + allowlist + .into_iter() + .flatten() + .filter_map(serde_json::Value::as_str) + .filter_map(|name| std::env::var(name).ok().map(|value| (name.to_string(), value))) + .collect() +} + struct TypescriptModuleLoader { source_maps: SourceMapStore, } @@ -359,7 +373,7 @@ mod tests { static ENV_LOCK: LazyLock> = LazyLock::new(|| Mutex::new(())); fn with_path_removed(f: impl FnOnce() -> T) -> T { - let _guard = ENV_LOCK.lock().unwrap(); + let _guard = ENV_LOCK.lock().unwrap_or_else(|error| error.into_inner()); let original = std::env::var_os("PATH"); unsafe { std::env::remove_var("PATH"); @@ -374,6 +388,22 @@ mod tests { result } + fn with_env_var(name: &str, value: &str, f: impl FnOnce() -> T) -> T { + let _guard = ENV_LOCK.lock().unwrap_or_else(|error| error.into_inner()); + let original = std::env::var_os(name); + unsafe { + std::env::set_var(name, value); + } + let result = f(); + unsafe { + match original { + Some(value) => std::env::set_var(name, value), + None => std::env::remove_var(name), + } + } + result + } + #[test] fn test_deno_runtime_creation() { let runtime = DenoRuntime::new(); @@ -442,4 +472,62 @@ console.log(`proxied/${ctx.logicalPath}`) ); assert_eq!(result.unwrap().trim(), "proxied/notes/today.md"); } + + #[test] + fn test_execute_ts_hides_untrusted_environment_by_default() { + with_env_var("TNMSD_SECRET_TOKEN_FOR_TEST", "secret-value", || { + let runtime = DenoRuntime::new().unwrap(); + let tmp = TempDir::new().unwrap(); + let script_path = tmp.path().join("env.ts"); + std::fs::write( + &script_path, + r#" +console.log(JSON.stringify({ + hasSecret: Deno.env.has("TNMSD_SECRET_TOKEN_FOR_TEST"), + secret: Deno.env.get("TNMSD_SECRET_TOKEN_FOR_TEST") ?? null, + envKeys: Object.keys(Deno.env.toObject()) +})) +"#, + ) + .unwrap(); + + let result = runtime.execute_ts(&script_path, "{}").unwrap(); + let parsed: serde_json::Value = serde_json::from_str(result.trim()).unwrap(); + + assert_eq!(parsed["hasSecret"], false); + assert_eq!(parsed["secret"], serde_json::Value::Null); + assert_eq!(parsed["envKeys"], serde_json::json!([])); + }); + } + + #[test] + fn test_execute_ts_exposes_only_allowed_environment_names() { + with_env_var("TNMSD_ALLOWED_ENV_FOR_TEST", "visible-value", || { + let runtime = DenoRuntime::new().unwrap(); + let tmp = TempDir::new().unwrap(); + let script_path = tmp.path().join("env.ts"); + std::fs::write( + &script_path, + r#" +console.log(JSON.stringify({ + allowed: Deno.env.get("TNMSD_ALLOWED_ENV_FOR_TEST") ?? null, + keys: Object.keys(Deno.env.toObject()) +})) +"#, + ) + .unwrap(); + + let context = serde_json::json!({ + "allowedEnv": ["TNMSD_ALLOWED_ENV_FOR_TEST", "TNMSD_MISSING_ENV_FOR_TEST"] + }); + let result = runtime.execute_ts(&script_path, &context.to_string()).unwrap(); + let parsed: serde_json::Value = serde_json::from_str(result.trim()).unwrap(); + + assert_eq!(parsed["allowed"], "visible-value"); + assert_eq!( + parsed["keys"], + serde_json::json!(["TNMSD_ALLOWED_ENV_FOR_TEST"]) + ); + }); + } } diff --git a/sdk/src/infra/desk_paths.rs b/sdk/src/infra/desk_paths.rs index 3184e709..fc456336 100644 --- a/sdk/src/infra/desk_paths.rs +++ b/sdk/src/infra/desk_paths.rs @@ -863,8 +863,7 @@ mod tests { fs::create_dir_all(&non_empty).unwrap(); fs::write(non_empty.join("keep.txt"), b"keep").unwrap(); - let result = - plan_workspace_empty_directory_cleanup(&workspace.to_string_lossy().into_owned(), &[], &[]); + let result = plan_workspace_empty_directory_cleanup(&workspace.to_string_lossy(), &[], &[]); assert_eq!( result.empty_dirs_to_delete, @@ -887,7 +886,7 @@ mod tests { fs::write(&file_to_delete, b"delete").unwrap(); let result = plan_workspace_empty_directory_cleanup( - &workspace.to_string_lossy().into_owned(), + &workspace.to_string_lossy(), &[file_to_delete.to_string_lossy().into_owned()], &[], ); @@ -910,8 +909,7 @@ mod tests { fs::create_dir_all(&empty_in_nm).unwrap(); - let result = - plan_workspace_empty_directory_cleanup(&workspace.to_string_lossy().into_owned(), &[], &[]); + let result = plan_workspace_empty_directory_cleanup(&workspace.to_string_lossy(), &[], &[]); assert!( !result @@ -935,7 +933,7 @@ mod tests { fs::create_dir_all(&nested_empty).unwrap(); let result = plan_workspace_empty_directory_cleanup( - &workspace.to_string_lossy().into_owned(), + &workspace.to_string_lossy(), &[], &[scheduled_dir.to_string_lossy().into_owned()], ); diff --git a/sdk/src/infra/md_compiler/expression_eval.rs b/sdk/src/infra/md_compiler/expression_eval.rs index edce55a7..6c1e4591 100644 --- a/sdk/src/infra/md_compiler/expression_eval.rs +++ b/sdk/src/infra/md_compiler/expression_eval.rs @@ -631,7 +631,7 @@ mod tests { let mut scope = EvaluationScope::new(); scope.insert("a".into(), json!(&a)); scope.insert("b".into(), json!(&b)); - let input = format!("https://{{a}}.com/{{b}}"); + let input = "https://{a}.com/{b}".to_string(); let result = evaluate_interpolations(&input, &scope); prop_assert_eq!(result, format!("https://{}.com/{}", a, b)); } diff --git a/sdk/src/infra/md_compiler/mdx_to_md.rs b/sdk/src/infra/md_compiler/mdx_to_md.rs index 61f5c8cd..9adfc0f4 100644 --- a/sdk/src/infra/md_compiler/mdx_to_md.rs +++ b/sdk/src/infra/md_compiler/mdx_to_md.rs @@ -822,7 +822,7 @@ mod tests { Some("[question]") ); assert!( - result.metadata.exports.get("allowedTools").is_some(), + result.metadata.exports.contains_key("allowedTools"), "allowedTools should be extracted" ); } diff --git a/sdk/src/infra/script_runtime.rs b/sdk/src/infra/script_runtime.rs index 0d74ee15..845ac143 100644 --- a/sdk/src/infra/script_runtime.rs +++ b/sdk/src/infra/script_runtime.rs @@ -188,22 +188,22 @@ pub fn resolve_public_path_impl( // Try Deno runtime first (if available) let deno = DenoRuntime::new().ok(); - if let Some(runtime) = deno { - if runtime.is_available() { - let proxy_path = std::path::Path::new(&ctx.aindex_dir) - .join("public") - .join("proxy.ts"); - let aindex_public_dir = build_aindex_public_dir(&ctx.aindex_dir)?; - if let Ok(result) = resolve_path_via_proxy_impl( - &proxy_path, - &aindex_public_dir, - logical_path, - serde_json::json!({}), - ) { - return Ok(result); - } - // Fall through to Rust implementation if Deno fails + if let Some(runtime) = deno + && runtime.is_available() + { + let proxy_path = std::path::Path::new(&ctx.aindex_dir) + .join("public") + .join("proxy.ts"); + let aindex_public_dir = build_aindex_public_dir(&ctx.aindex_dir)?; + if let Ok(result) = resolve_path_via_proxy_impl( + &proxy_path, + &aindex_public_dir, + logical_path, + serde_json::json!({}), + ) { + return Ok(result); } + // Fall through to Rust implementation if Deno fails } // Fall back to built-in Rust implementation diff --git a/sdk/src/policy/cleanup.rs b/sdk/src/policy/cleanup.rs index cc176b92..e720ec88 100644 --- a/sdk/src/policy/cleanup.rs +++ b/sdk/src/policy/cleanup.rs @@ -967,6 +967,14 @@ fn get_protected_path_violation( get_protected_path_violation_for_key(&absolute_target_path, &canonical_target_key, guard) } +pub fn detect_protected_path_violation( + snapshot: &CleanupSnapshot, + target_path: &str, +) -> Result, String> { + let guard = create_guard(snapshot, &snapshot.protected_rules)?; + Ok(get_protected_path_violation(target_path, &guard)) +} + fn target_matches_project_root(target_path: &str, project_root_keys: &HashSet) -> bool { build_comparison_keys(target_path) .into_iter() diff --git a/sdk/src/policy/md_cleanup.rs b/sdk/src/policy/md_cleanup.rs index 45952ac7..d0317d22 100644 --- a/sdk/src/policy/md_cleanup.rs +++ b/sdk/src/policy/md_cleanup.rs @@ -31,7 +31,7 @@ fn clean_markdown_content(content: &str) -> String { let trimmed_lines: Vec = lines .into_iter() .map(|line| { - let trimmed = line.trim_end_matches(|c: char| c == ' ' || c == '\t'); + let trimmed = line.trim_end_matches([' ', '\t']); trimmed.to_string() }) .collect(); @@ -76,11 +76,11 @@ fn process_markdown_file( return; } - if !dry_run { - if let Err(err) = std::fs::write(file_path, &cleaned) { - errors.push((file_path.to_string_lossy().into_owned(), err.to_string())); - return; - } + if !dry_run + && let Err(err) = std::fs::write(file_path, &cleaned) + { + errors.push((file_path.to_string_lossy().into_owned(), err.to_string())); + return; } modified_files.push(file_path.to_string_lossy().into_owned()); @@ -113,12 +113,12 @@ fn process_directory( if file_type.is_dir() { process_directory(&entry_path, modified_files, skipped_files, errors, dry_run); - } else if file_type.is_file() { - if let Some(name) = entry_path.file_name() { - let name_str = name.to_string_lossy(); - if name_str.ends_with(".md") { - process_markdown_file(&entry_path, modified_files, skipped_files, errors, dry_run); - } + } else if file_type.is_file() + && let Some(name) = entry_path.file_name() + { + let name_str = name.to_string_lossy(); + if name_str.ends_with(".md") { + process_markdown_file(&entry_path, modified_files, skipped_files, errors, dry_run); } } } diff --git a/sdk/src/repositories/aindex_resolvers.rs b/sdk/src/repositories/aindex_resolvers.rs index 590fe0c8..617183f8 100644 --- a/sdk/src/repositories/aindex_resolvers.rs +++ b/sdk/src/repositories/aindex_resolvers.rs @@ -540,7 +540,7 @@ console.log(JSON.stringify({ .lock() .unwrap_or_else(|poisoned| poisoned.into_inner()); let home_dir = config::resolve_tilde("~"); - if home_dir == PathBuf::from("~") { + if home_dir == Path::new("~") { return; } if !home_dir.exists() { diff --git a/sdk/src/repositories/command.rs b/sdk/src/repositories/command.rs index 318e3e03..a88f54c1 100644 --- a/sdk/src/repositories/command.rs +++ b/sdk/src/repositories/command.rs @@ -78,10 +78,9 @@ fn build_command_prompt( .or_else(|| underscore_index.map(|i| &base_name[..i])) .map(String::from); - let command_name = if parent_dir_name.is_some() || underscore_index.is_none() { - base_name.to_string() - } else { - base_name[underscore_index.unwrap() + 1..].to_string() + let command_name = match (parent_dir_name, underscore_index) { + (Some(_), _) | (_, None) => base_name.to_string(), + (None, Some(index)) => base_name[index + 1..].to_string(), }; let global_only = match compiled.metadata.get("scope") { diff --git a/sdk/src/repositories/global_memory.rs b/sdk/src/repositories/global_memory.rs index e6a41bd0..1de9aadc 100644 --- a/sdk/src/repositories/global_memory.rs +++ b/sdk/src/repositories/global_memory.rs @@ -38,7 +38,7 @@ pub fn collect_global_memory(options_json: &str) -> Result Result<() fn extract_front_matter(raw_mdx: &str) -> (Option, Option) { let front_matter_regex = regex_lite::Regex::new(r"(?s)^---\r?\n(.*?)\r?\n---(?:(?:\r?\n){1,2}|$)").ok(); - if let Some(re) = front_matter_regex { - if let Some(caps) = re.captures(raw_mdx) { - let raw_fm = caps.get(1).map(|m| m.as_str().to_string()); - let yaml_json = raw_fm - .as_deref() - .and_then(|fm| serde_yml::from_str::(fm).ok()); - return (yaml_json, raw_fm); - } + if let Some(re) = front_matter_regex + && let Some(caps) = re.captures(raw_mdx) + { + let raw_fm = caps.get(1).map(|m| m.as_str().to_string()); + let yaml_json = raw_fm + .as_deref() + .and_then(|fm| serde_yml::from_str::(fm).ok()); + return (yaml_json, raw_fm); } (None, None) } @@ -85,7 +85,7 @@ fn read_root_memory_prompt( let file_path_str = file_path.to_string_lossy().into_owned(); let artifact = read_prompt_artifact(&file_path_str, "dist", global_scope_json) - .map_err(|e| crate::CliError::ConfigError(e))?; + .map_err(crate::CliError::ConfigError)?; assert_no_residual_module_syntax(&artifact.content, &file_path_str) .map_err(crate::CliError::ConfigError)?; @@ -120,7 +120,7 @@ fn read_child_memory_prompt( let file_path_str = file_path.to_string_lossy().into_owned(); let artifact = read_prompt_artifact(&file_path_str, "dist", global_scope_json) - .map_err(|e| crate::CliError::ConfigError(e))?; + .map_err(crate::CliError::ConfigError)?; assert_no_residual_module_syntax(&artifact.content, &file_path_str) .map_err(crate::CliError::ConfigError)?; @@ -238,7 +238,7 @@ fn read_workspace_root_project_prompt( let file_path_str = file_path.to_string_lossy().into_owned(); let artifact = read_prompt_artifact(&file_path_str, "dist", global_scope_json) - .map_err(|e| crate::CliError::ConfigError(e))?; + .map_err(crate::CliError::ConfigError)?; assert_no_residual_module_syntax(&artifact.content, &file_path_str) .map_err(crate::CliError::ConfigError)?; @@ -413,7 +413,7 @@ mod tests { ) .unwrap(); - let workspace = create_workspace(&tmp.path().to_string_lossy().to_string(), vec![]); + let workspace = create_workspace(tmp.path().to_string_lossy().as_ref(), vec![]); let options = serde_json::json!({ "workspaceDir": tmp.path().to_string_lossy().to_string(), "workspace": workspace, @@ -440,7 +440,7 @@ mod tests { fs::create_dir_all(&external_dir).unwrap(); fs::write(external_dir.join("workspace.mdx"), "Wrong workspace prompt").unwrap(); - let workspace = create_workspace(&tmp.path().to_string_lossy().to_string(), vec![]); + let workspace = create_workspace(tmp.path().to_string_lossy().as_ref(), vec![]); let options = serde_json::json!({ "workspaceDir": tmp.path().to_string_lossy().to_string(), "workspace": workspace, diff --git a/sdk/src/repositories/prompt_artifact.rs b/sdk/src/repositories/prompt_artifact.rs index e385cfcb..e3776078 100644 --- a/sdk/src/repositories/prompt_artifact.rs +++ b/sdk/src/repositories/prompt_artifact.rs @@ -87,7 +87,7 @@ pub fn read_prompt_artifact( extract_metadata: true, ..Default::default() }; - let result = mdx_to_md_with_metadata(&raw_mdx, Some(opts)).map_err(|e| e)?; + let result = mdx_to_md_with_metadata(&raw_mdx, Some(opts))?; Ok(PromptArtifact { raw_mdx, diff --git a/sdk/src/repositories/readme.rs b/sdk/src/repositories/readme.rs index fba0a8c0..42a8d993 100644 --- a/sdk/src/repositories/readme.rs +++ b/sdk/src/repositories/readme.rs @@ -98,7 +98,7 @@ fn collect_readme_files_recursive( let file_path_str = file_path.to_string_lossy().into_owned(); let artifact = read_prompt_artifact(&file_path_str, "dist", global_scope_json) - .map_err(|e| crate::CliError::ConfigError(e))?; + .map_err(crate::CliError::ConfigError)?; let content = artifact.content; let length = content.len(); diff --git a/sdk/src/repositories/rule.rs b/sdk/src/repositories/rule.rs index 7856b810..54d93051 100644 --- a/sdk/src/repositories/rule.rs +++ b/sdk/src/repositories/rule.rs @@ -109,7 +109,7 @@ fn build_rule_prompt( let prefix = normalized_name.split('/').next().unwrap_or("").to_string(); let rule_name = normalized_name .split('/') - .last() + .next_back() .unwrap_or(&normalized_name) .to_string(); diff --git a/sdk/src/repositories/shared_ignore.rs b/sdk/src/repositories/shared_ignore.rs index 659a5a61..1b04dbc7 100644 --- a/sdk/src/repositories/shared_ignore.rs +++ b/sdk/src/repositories/shared_ignore.rs @@ -45,20 +45,20 @@ pub fn collect_shared_ignore(options_json: &str) -> Result = Vec::new(); for file_name in AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS { - if let Some(content) = read_public_file(&aindex_dir_str, file_name) { - if !content.is_empty() { - let proxied_name = proxy_public_path(file_name); - let source_path = std::path::Path::new(&aindex_dir_str) - .join("public") - .join(&proxied_name) - .to_string_lossy() - .into_owned(); - results.push(AIAgentIgnoreConfigFile { - file_name: file_name.to_string(), - content, - source_path, - }); - } + if let Some(content) = read_public_file(&aindex_dir_str, file_name) + && !content.is_empty() + { + let proxied_name = proxy_public_path(file_name); + let source_path = std::path::Path::new(&aindex_dir_str) + .join("public") + .join(&proxied_name) + .to_string_lossy() + .into_owned(); + results.push(AIAgentIgnoreConfigFile { + file_name: file_name.to_string(), + content, + source_path, + }); } } diff --git a/sdk/src/repositories/skill.rs b/sdk/src/repositories/skill.rs index 3e70c122..672cde19 100644 --- a/sdk/src/repositories/skill.rs +++ b/sdk/src/repositories/skill.rs @@ -76,14 +76,14 @@ fn assert_no_residual_module_syntax(content: &str, file_path: &str) -> Result<() fn extract_front_matter(raw_mdx: &str) -> (Option, Option) { let front_matter_regex = regex_lite::Regex::new(r"(?s)^---\r?\n(.*?)\r?\n---(?:(?:\r?\n){1,2}|$)").ok(); - if let Some(re) = front_matter_regex { - if let Some(caps) = re.captures(raw_mdx) { - let raw_fm = caps.get(1).map(|m| m.as_str().to_string()); - let yaml_json = raw_fm - .as_deref() - .and_then(|fm| serde_yml::from_str::(fm).ok()); - return (yaml_json, raw_fm); - } + if let Some(re) = front_matter_regex + && let Some(caps) = re.captures(raw_mdx) + { + let raw_fm = caps.get(1).map(|m| m.as_str().to_string()); + let yaml_json = raw_fm + .as_deref() + .and_then(|fm| serde_yml::from_str::(fm).ok()); + return (yaml_json, raw_fm); } (None, None) } @@ -99,62 +99,62 @@ fn extract_skill_metadata_from_export(content: &str) -> Value { let description_regex = regex_lite::Regex::new(r#"description\s*:\s*['\"`]([^'\"`]+)['\"`]"#).unwrap(); - if let Some(caps) = description_regex.captures(object_content) { - if let Some(m) = caps.get(1) { - metadata.insert( - "description".to_string(), - Value::String(m.as_str().to_string()), - ); - } + if let Some(caps) = description_regex.captures(object_content) + && let Some(m) = caps.get(1) + { + metadata.insert( + "description".to_string(), + Value::String(m.as_str().to_string()), + ); } let name_regex = regex_lite::Regex::new(r#"name\s*:\s*['\"`]([^'\"`]+)['\"`]"#).unwrap(); - if let Some(caps) = name_regex.captures(object_content) { - if let Some(m) = caps.get(1) { - metadata.insert("name".to_string(), Value::String(m.as_str().to_string())); - } + if let Some(caps) = name_regex.captures(object_content) + && let Some(m) = caps.get(1) + { + metadata.insert("name".to_string(), Value::String(m.as_str().to_string())); } let display_name_regex = regex_lite::Regex::new(r#"displayName\s*:\s*['\"`]([^'\"`]+)['\"`]"#).unwrap(); - if let Some(caps) = display_name_regex.captures(object_content) { - if let Some(m) = caps.get(1) { - metadata.insert( - "displayName".to_string(), - Value::String(m.as_str().to_string()), - ); - } + if let Some(caps) = display_name_regex.captures(object_content) + && let Some(m) = caps.get(1) + { + metadata.insert( + "displayName".to_string(), + Value::String(m.as_str().to_string()), + ); } let keywords_regex = regex_lite::Regex::new(r"keywords\s*:\s*\[([^\]]+)\]").unwrap(); - if let Some(caps) = keywords_regex.captures(object_content) { - if let Some(m) = caps.get(1) { - let keywords: Vec = m - .as_str() - .split(',') - .map(|k| { - k.trim() - .trim_matches(|c: char| c == '"' || c == '\'' || c == '`') - }) - .filter(|k| !k.is_empty()) - .map(|k| Value::String(k.to_string())) - .collect(); - metadata.insert("keywords".to_string(), Value::Array(keywords)); - } + if let Some(caps) = keywords_regex.captures(object_content) + && let Some(m) = caps.get(1) + { + let keywords: Vec = m + .as_str() + .split(',') + .map(|k| { + k.trim() + .trim_matches(|c: char| c == '"' || c == '\'' || c == '`') + }) + .filter(|k| !k.is_empty()) + .map(|k| Value::String(k.to_string())) + .collect(); + metadata.insert("keywords".to_string(), Value::Array(keywords)); } let author_regex = regex_lite::Regex::new(r#"author\s*:\s*['\"`]([^'\"`]+)['\"`]"#).unwrap(); - if let Some(caps) = author_regex.captures(object_content) { - if let Some(m) = caps.get(1) { - metadata.insert("author".to_string(), Value::String(m.as_str().to_string())); - } + if let Some(caps) = author_regex.captures(object_content) + && let Some(m) = caps.get(1) + { + metadata.insert("author".to_string(), Value::String(m.as_str().to_string())); } let version_regex = regex_lite::Regex::new(r#"version\s*:\s*['\"`]([^'\"`]+)['\"`]"#).unwrap(); - if let Some(caps) = version_regex.captures(object_content) { - if let Some(m) = caps.get(1) { - metadata.insert("version".to_string(), Value::String(m.as_str().to_string())); - } + if let Some(caps) = version_regex.captures(object_content) + && let Some(m) = caps.get(1) + { + metadata.insert("version".to_string(), Value::String(m.as_str().to_string())); } Value::Object(metadata) @@ -298,7 +298,7 @@ fn scan_child_docs( let file_path_str = path.to_string_lossy().into_owned(); let artifact = read_prompt_artifact(&file_path_str, "dist", global_scope_json) - .map_err(|e| crate::CliError::ConfigError(e))?; + .map_err(crate::CliError::ConfigError)?; let compiled_content = transform_mdx_references_to_md(&artifact.content); assert_no_residual_module_syntax(&compiled_content, &file_path_str) .map_err(crate::CliError::ConfigError)?; @@ -336,7 +336,6 @@ fn scan_child_docs( fn scan_resources( current_dir: &Path, root_src_dir: &Path, - skill_dir: &str, ) -> Result, crate::CliError> { let mut resources = Vec::new(); let entries = match std::fs::read_dir(current_dir) { @@ -347,7 +346,7 @@ fn scan_resources( for entry in entries.flatten() { let path = entry.path(); if entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false) { - resources.extend(scan_resources(&path, root_src_dir, skill_dir)?); + resources.extend(scan_resources(&path, root_src_dir)?); continue; } let Some(file_name) = path.file_name().and_then(|s| s.to_str()) else { @@ -435,8 +434,9 @@ fn assert_compiled_child_docs_exist( } let src_path = skill_src_dir.join(relative_path.replace(".mdx", ".src.mdx")); return Err(crate::CliError::ConfigError(format!( - "Missing compiled prompt for skill child doc \"{}\". source: {} expected compiled: {}", - format!("{}/{}", skill_name, relative_path), + "Missing compiled prompt for skill child doc \"{}/{}\". source: {} expected compiled: {}", + skill_name, + relative_path, src_path.to_string_lossy(), compiled_path.to_string_lossy() ))); @@ -552,7 +552,7 @@ fn create_skill_prompt( let compiled_file_path_str = compiled_file_path.to_string_lossy().into_owned(); let artifact = read_prompt_artifact(&compiled_file_path_str, "dist", global_scope_json) - .map_err(|e| crate::CliError::ConfigError(e))?; + .map_err(crate::CliError::ConfigError)?; let raw_content = artifact.raw_mdx.clone(); let content = transform_mdx_references_to_md(&artifact.content); @@ -560,7 +560,7 @@ fn create_skill_prompt( .map_err(crate::CliError::ConfigError)?; let export_metadata = extract_skill_metadata_from_export(&raw_content); - let dist_metadata = Value::Object(artifact.metadata.into_iter().map(|(k, v)| (k, v)).collect()); + let dist_metadata = Value::Object(artifact.metadata.into_iter().collect()); let merged_metadata = merge_defined_skill_metadata(&[Some(export_metadata), Some(dist_metadata)]); let (yaml_front_matter, _raw_front_matter) = extract_front_matter(&raw_content); @@ -609,7 +609,7 @@ fn create_skill_prompt( let child_docs = scan_child_docs(skill_dir, skill_dir, &skill_dir_str, global_scope_json)?; let resources = if skill_dir.is_dir() { - scan_resources(skill_dir, skill_dir, &skill_dir_str)? + scan_resources(skill_dir, skill_dir)? } else { vec![] }; @@ -653,12 +653,12 @@ pub fn collect_skill(options_json: &str) -> Result { let mut skill_names: Vec = Vec::new(); - if skills_dir.is_dir() { - if let Ok(entries) = std::fs::read_dir(&skills_dir) { - for entry in entries.flatten() { - if entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false) { - skill_names.push(entry.file_name().to_string_lossy().into_owned()); - } + if skills_dir.is_dir() + && let Ok(entries) = std::fs::read_dir(&skills_dir) + { + for entry in entries.flatten() { + if entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false) { + skill_names.push(entry.file_name().to_string_lossy().into_owned()); } } } diff --git a/sdk/src/services/clean_service.rs b/sdk/src/services/clean_service.rs index aa0c1bfe..11f78c3d 100644 --- a/sdk/src/services/clean_service.rs +++ b/sdk/src/services/clean_service.rs @@ -15,13 +15,16 @@ use crate::services::common::{ }; use crate::{CliError, MemorySyncCommandOptions, MemorySyncCommandResult}; +type CleanupOutputMap = HashMap>; +type CleanupDeclarationMap = HashMap; + pub fn clean(options: MemorySyncCommandOptions) -> Result { let logger = create_logger( "clean", options .log_level .as_deref() - .and_then(|s| crate::infra::logger::LogLevel::from_str_loose(s)), + .and_then(crate::infra::logger::LogLevel::from_str_loose), ); let _span = logger.span("command.clean").enter(); @@ -93,7 +96,7 @@ pub fn clean(options: MemorySyncCommandOptions) -> Result>(); @@ -146,7 +149,7 @@ pub fn clean(options: MemorySyncCommandOptions) -> Result Result< - ( - HashMap>, - HashMap, - ), - CliError, -> { - let mut output_map: HashMap> = HashMap::new(); - let mut cleanup_map: HashMap = HashMap::new(); +) -> Result<(CleanupOutputMap, CleanupDeclarationMap), CliError> { + let mut output_map: CleanupOutputMap = HashMap::new(); + let mut cleanup_map: CleanupDeclarationMap = HashMap::new(); let base_span = logger.span("output.build").enter(); let base_plans = crate::domain::base_output_plans::build_base_output_plans(context)?; for plan in &base_plans.plugins { cleanup_map .entry(plan.plugin_name.clone()) - .or_insert_with(CleanupDeclarationsDto::default) + .or_default() .delete .extend(plan.cleanup.delete.clone()); if enabled_plugins.is_enabled(plan.plugin_name.as_str()) { @@ -320,7 +317,7 @@ fn build_output_map( { cleanup_map .entry("ClaudeCodeCLIOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) + .or_default() .delete .extend(plan.cleanup.delete.clone()); if enabled_plugins.claude_code { @@ -336,7 +333,7 @@ fn build_output_map( { cleanup_map .entry("CodexCLIOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) + .or_default() .delete .extend(plan.cleanup.delete.clone()); if enabled_plugins.codex { @@ -353,7 +350,7 @@ fn build_output_map( { cleanup_map .entry("CursorOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) + .or_default() .delete .extend(plan.cleanup.delete.clone()); if enabled_plugins.cursor { @@ -369,7 +366,7 @@ fn build_output_map( { cleanup_map .entry("DroidCLIOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) + .or_default() .delete .extend(plan.cleanup.delete.clone()); if enabled_plugins.droid { @@ -386,7 +383,7 @@ fn build_output_map( { cleanup_map .entry("GeminiCLIOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) + .or_default() .delete .extend(plan.cleanup.delete.clone()); if enabled_plugins.gemini { @@ -399,7 +396,7 @@ fn build_output_map( } } if let Ok(plan) = crate::domain::output_plans::jetbrains_ai_assistant_codex_output_plan::build_jetbrains_ai_assistant_codex_output_plan(context) { - cleanup_map.entry("JetBrainsAIAssistantCodexOutputAdaptor".to_string()).or_insert_with(CleanupDeclarationsDto::default).delete.extend(plan.cleanup.delete.clone()); + cleanup_map.entry("JetBrainsAIAssistantCodexOutputAdaptor".to_string()).or_default().delete.extend(plan.cleanup.delete.clone()); if enabled_plugins.jetbrains { for file in &plan.output_files { output_map.entry("JetBrainsAIAssistantCodexOutputAdaptor".to_string()).or_default().push(file.path.clone()); } } @@ -407,7 +404,7 @@ fn build_output_map( if let Ok(plan) = crate::domain::output_plans::kiro_output_plan::build_kiro_output_plan(context) { cleanup_map .entry("KiroCLIOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) + .or_default() .delete .extend(plan.cleanup.delete.clone()); if enabled_plugins.kiro { @@ -424,7 +421,7 @@ fn build_output_map( { cleanup_map .entry("OpencodeCLIOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) + .or_default() .delete .extend(plan.cleanup.delete.clone()); if enabled_plugins.opencode { @@ -440,7 +437,7 @@ fn build_output_map( { cleanup_map .entry("QoderIDEPluginOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) + .or_default() .delete .extend(plan.cleanup.delete.clone()); if enabled_plugins.qoder { @@ -455,7 +452,7 @@ fn build_output_map( if let Ok(plan) = crate::domain::output_plans::trae_output_plan::build_trae_output_plan(context) { cleanup_map .entry("TraeOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) + .or_default() .delete .extend(plan.cleanup.delete.clone()); if enabled_plugins.trae || enabled_plugins.trae_cn { @@ -470,7 +467,7 @@ fn build_output_map( if let Ok(plan) = crate::domain::output_plans::warp_output_plan::build_warp_output_plan(context) { cleanup_map .entry("WarpIDEOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) + .or_default() .delete .extend(plan.cleanup.delete.clone()); if enabled_plugins.warp { @@ -487,7 +484,7 @@ fn build_output_map( { cleanup_map .entry("WindsurfOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) + .or_default() .delete .extend(plan.cleanup.delete.clone()); if enabled_plugins.windsurf { diff --git a/sdk/src/services/dry_run_service.rs b/sdk/src/services/dry_run_service.rs index 39ac29fd..16a818ca 100644 --- a/sdk/src/services/dry_run_service.rs +++ b/sdk/src/services/dry_run_service.rs @@ -27,7 +27,7 @@ pub fn dry_run(options: MemorySyncCommandOptions) -> Result Result, + protection_snapshot: &CleanupSnapshot, logger: &Logger, ) -> Result { let mut files_affected = 0usize; @@ -297,7 +300,21 @@ fn write_output_files( for file in outputs.values() { let path = Path::new(&file.path); - match prepare_target_path(path, &mut warnings) { + if let Some(violation) = + crate::policy::cleanup::detect_protected_path_violation(protection_snapshot, &file.path) + .map_err(CliError::ExecutionError)? + { + errors.push(json!({ + "path": file.path, + "protected": violation.protected_path, + "reason": violation.reason, + "source": violation.source, + "error": "Refusing to write protected path.", + })); + continue; + } + + match prepare_target_path(path, protection_snapshot, &mut warnings) { Ok(created_dirs) => { dirs_affected += created_dirs; } @@ -359,13 +376,63 @@ fn render_bytes(file: &PlannedOutputFile) -> Result, CliError> { } } -fn prepare_target_path(path: &Path, warnings: &mut Vec) -> Result { +fn build_install_protection_snapshot( + workspace_dir: &str, + outputs: &BTreeMap, +) -> CleanupSnapshot { + CleanupSnapshot { + workspace_dir: workspace_dir.to_string(), + aindex_dir: Some( + crate::domain::config::resolve_workspace_aindex_dir(workspace_dir) + .to_string_lossy() + .into_owned(), + ), + project_roots: discover_install_project_roots(workspace_dir, outputs), + protected_rules: Vec::new(), + plugin_snapshots: Vec::new(), + empty_dir_exclude_globs: Vec::new(), + } +} + +fn discover_install_project_roots( + workspace_dir: &str, + outputs: &BTreeMap, +) -> Vec { + let workspace = Path::new(workspace_dir); + let mut roots = outputs + .values() + .filter_map(|file| { + Path::new(&file.path) + .strip_prefix(workspace) + .ok() + .and_then(|relative| relative.components().next()) + .map(|component| workspace.join(component.as_os_str()).to_string_lossy().into_owned()) + }) + .collect::>(); + roots.sort(); + roots.dedup(); + roots +} + +fn prepare_target_path( + path: &Path, + protection_snapshot: &CleanupSnapshot, + warnings: &mut Vec, +) -> Result { let mut created_dirs = 0usize; if let Some(parent) = path.parent() { if let Some(blocking) = path_blocking::find_blocking_non_directory_path(&parent.to_string_lossy()) { + if let Some(violation) = + crate::policy::cleanup::detect_protected_path_violation(protection_snapshot, &blocking)? + { + return Err(format!( + "Refusing to delete protected blocking path {} (protected: {}, reason: {})", + blocking, violation.protected_path, violation.reason + )); + } desk_paths::delete_path_sync(&blocking).map_err(|error| error.to_string())?; warnings.push(json!({ "path": blocking, @@ -380,6 +447,15 @@ fn prepare_target_path(path: &Path, warnings: &mut Vec) -> Result) -> Result Result<(), String> { Command::Lint => { println!("[xtask] Running fmt check..."); run_cargo(&["fmt", "--check"])?; - println!("[xtask] Linting completed (clippy skipped - pre-existing warnings)."); - // run_cargo(&["clippy", "--workspace", "--", "-D", "warnings"])?; - // println!("[xtask] Running clippy..."); - // println!("[xtask] Linting completed."); + println!("[xtask] Running clippy..."); + run_cargo(&["clippy", "--workspace", "--all-targets", "--", "-D", "warnings"])?; + println!("[xtask] Linting completed."); } Command::CheckType => { println!("[xtask] Running type checking..."); @@ -214,6 +213,7 @@ fn main() -> Result<(), String> { println!("[xtask] Running full check..."); run_cargo(&["fmt", "--check"])?; run_cargo(&["check", "--workspace", "--exclude", "tnmsg"])?; + run_cargo(&["clippy", "--workspace", "--all-targets", "--", "-D", "warnings"])?; run_cargo(&[ "test", "--workspace", From 3c14c01e9bbb2a68361ef1257a84842c990ef883 Mon Sep 17 00:00:00 2001 From: SAY-5 Date: Wed, 29 Apr 2026 17:10:53 -0700 Subject: [PATCH 08/45] fix(transformer): require basename to contain '.' before simplifying link text MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #195. Pre-fix the link-text simplification gate was \`t.value.contains('/') && t.value.contains('.')\`. That fired on strings like \`v1.0/release\` (has \`/\`, has \`.\`) and rewrote them to \`release\` — losing the version segment that was the actual point of the link text. Tighten the heuristic: require the *basename* (the segment after the final \`/\`) to itself contain a \`.\`. Now: - \`docs/guide/intro.md\` → basename \`intro.md\` (has \`.\`) → simplified to \`intro.md\`. (Unchanged.) - \`v1.0/release\` → basename \`release\` (no \`.\`) → left alone. - \`a/b/c\` → basename \`c\` (no \`.\`, no \`/\`-after-last-/) → left alone (was already left alone). Existing transformer tests still pass (12/12 in \`infra::md_compiler::transformer::tests\`). (cherry picked from commit 5323d46f4ac086918ef0fb70f132056f847c54c9) --- sdk/src/infra/md_compiler/transformer.rs | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/sdk/src/infra/md_compiler/transformer.rs b/sdk/src/infra/md_compiler/transformer.rs index 83154ce9..418e8aa6 100644 --- a/sdk/src/infra/md_compiler/transformer.rs +++ b/sdk/src/infra/md_compiler/transformer.rs @@ -922,14 +922,24 @@ fn transform_children(children: &[Node], ctx: &ProcessingContext) -> Vec { } Node::Link(link) => { let new_children = transform_inline_children(&link.children, ctx); - // Simplify link text that looks like file paths + // Simplify link text that looks like a file path + // (`docs/guide/intro.md` → `intro.md`). + // + // Pre-#195 the guard was `contains('/') && contains('.')`, which + // also matched version-prefixed strings like `v1.0/release` and + // simplified them to `release` — losing the version segment + // that was the actual point of the link text. Tighten the + // heuristic to require the *basename* (the segment after the + // final `/`) to itself contain a `.`. That way a real file + // path (`a/b.md` → basename `b.md`) still simplifies, but + // `v1.0/release` (basename `release`, no dot) is left alone. let simplified = new_children .into_iter() .map(|c| { if let Node::Text(t) = &c && t.value.contains('/') - && t.value.contains('.') && let Some(basename) = t.value.rsplit('/').next() + && basename.contains('.') { return Node::Text(Text { value: basename.to_string(), From 84e639e73b5dcdfca56126b201154a616ce391e5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Thu, 30 Apr 2026 11:47:33 +0800 Subject: [PATCH 09/45] test(transformer): add regression coverage for issue 249 --- sdk/src/infra/md_compiler/transformer.rs | 62 +++++++++++++++++------- 1 file changed, 44 insertions(+), 18 deletions(-) diff --git a/sdk/src/infra/md_compiler/transformer.rs b/sdk/src/infra/md_compiler/transformer.rs index 418e8aa6..f293e001 100644 --- a/sdk/src/infra/md_compiler/transformer.rs +++ b/sdk/src/infra/md_compiler/transformer.rs @@ -734,6 +734,25 @@ fn convert_strong_text_element( })]) } +fn simplify_link_text_children(children: Vec) -> Vec { + children + .into_iter() + .map(|c| { + if let Node::Text(t) = &c + && t.value.contains('/') + && let Some(basename) = t.value.rsplit('/').next() + && basename.contains('.') + { + return Node::Text(Text { + value: basename.to_string(), + position: t.position.clone(), + }); + } + c + }) + .collect() +} + fn convert_emphasis_element( element: &MdxJsxFlowElement, ctx: &ProcessingContext, @@ -925,7 +944,8 @@ fn transform_children(children: &[Node], ctx: &ProcessingContext) -> Vec { // Simplify link text that looks like a file path // (`docs/guide/intro.md` → `intro.md`). // - // Pre-#195 the guard was `contains('/') && contains('.')`, which + // #249 fixes the over-eager link-text simplification bug. + // Pre-#249 the guard was `contains('/') && contains('.')`, which // also matched version-prefixed strings like `v1.0/release` and // simplified them to `release` — losing the version segment // that was the actual point of the link text. Tighten the @@ -933,22 +953,7 @@ fn transform_children(children: &[Node], ctx: &ProcessingContext) -> Vec { // final `/`) to itself contain a `.`. That way a real file // path (`a/b.md` → basename `b.md`) still simplifies, but // `v1.0/release` (basename `release`, no dot) is left alone. - let simplified = new_children - .into_iter() - .map(|c| { - if let Node::Text(t) = &c - && t.value.contains('/') - && let Some(basename) = t.value.rsplit('/').next() - && basename.contains('.') - { - return Node::Text(Text { - value: basename.to_string(), - position: t.position.clone(), - }); - } - c - }) - .collect(); + let simplified = simplify_link_text_children(new_children); let evaluated_url = evaluate_interpolations(&link.url, &ctx.scope); result.push(Node::Link(Link { children: simplified, @@ -1109,9 +1114,10 @@ fn transform_inline_children(children: &[Node], ctx: &ProcessingContext) -> Vec< } Node::Link(link) => { let new_children = transform_inline_children(&link.children, ctx); + let simplified = simplify_link_text_children(new_children); let evaluated_url = evaluate_interpolations(&link.url, &ctx.scope); result.push(Node::Link(Link { - children: new_children, + children: simplified, position: link.position.clone(), url: evaluated_url, title: link.title.clone(), @@ -1263,6 +1269,26 @@ mod tests { ); } + #[test] + fn test_link_text_keeps_version_prefixed_path_for_issue_249() { + let result = compile("[v1.0/release](https://example.com)\n", make_scope()); + assert!( + result.contains("[v1.0/release](https://example.com)"), + "Got: {}", + result + ); + } + + #[test] + fn test_link_text_still_simplifies_real_file_path_after_issue_249() { + let result = compile("[docs/guide/intro.md](https://example.com)\n", make_scope()); + assert!( + result.contains("[intro.md](https://example.com)"), + "Got: {}", + result + ); + } + #[test] fn test_image_url_interpolation() { let result = compile("![logo](https://{tool.name}.com/logo.png)\n", make_scope()); From ba28a1993409c14d27ecb5769ec7a3131b983d61 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Thu, 30 Apr 2026 12:02:48 +0800 Subject: [PATCH 10/45] test(cleanup): cover issue 250 slash normalization --- sdk/src/policy/cleanup.rs | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/sdk/src/policy/cleanup.rs b/sdk/src/policy/cleanup.rs index e720ec88..c95ff1ab 100644 --- a/sdk/src/policy/cleanup.rs +++ b/sdk/src/policy/cleanup.rs @@ -138,7 +138,14 @@ fn normalize_glob_pattern(pattern: &str) -> String { fn normalize_relative_glob_pattern(pattern: &str) -> String { let normalized = pattern.replace('\\', "/"); let normalized = normalized.trim_start_matches("./"); - normalized.trim_start_matches('/').to_string() + // #250 fixes over-stripping of leading slashes in relative glob + // normalization. A relative `/foo` should become `foo`, but `//foo` + // must not collapse all the way to `foo` because that changes the + // pattern shape seen by downstream matching. + match normalized.strip_prefix('/') { + Some(rest) => rest.to_string(), + None => normalized.to_string(), + } } fn normalize_workspace_relative_path(path: &Path, workspace_dir: &Path) -> Option { @@ -3183,4 +3190,13 @@ mod tests { // The aindex directory must still exist assert!(aindex_dir.exists()); } + + /// Regression for #250: normalizing a relative glob must strip exactly + /// one leading slash so `//foo` does not collapse to `foo`. + #[test] + fn regression_normalize_relative_glob_pattern_preserves_double_leading_slash_shape() { + assert_eq!(normalize_relative_glob_pattern("/foo"), "foo"); + assert_eq!(normalize_relative_glob_pattern("./foo"), "foo"); + assert_eq!(normalize_relative_glob_pattern("//foo"), "/foo"); + } } From 679e083028b33c225ab4e8bda8eb5ff4ef6ee838 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Thu, 30 Apr 2026 12:09:50 +0800 Subject: [PATCH 11/45] test(cleanup): cover issue 251 bang glob detection --- sdk/src/policy/cleanup.rs | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/sdk/src/policy/cleanup.rs b/sdk/src/policy/cleanup.rs index c95ff1ab..3c0c088c 100644 --- a/sdk/src/policy/cleanup.rs +++ b/sdk/src/policy/cleanup.rs @@ -276,13 +276,16 @@ fn build_globset(patterns: &[String]) -> Result, String> { } fn has_glob_magic(value: &str) -> bool { - value.contains('*') + // #251 fixes over-classifying literal `!` filename segments as glob + // syntax. In globset, `!` only has special meaning as a leading + // pattern negation marker, not in the middle of a path segment. + value.starts_with('!') + || value.contains('*') || value.contains('?') || value.contains('[') || value.contains(']') || value.contains('{') || value.contains('}') - || value.contains('!') } fn detect_glob_scan_root(pattern: &str) -> PathBuf { @@ -3199,4 +3202,14 @@ mod tests { assert_eq!(normalize_relative_glob_pattern("./foo"), "foo"); assert_eq!(normalize_relative_glob_pattern("//foo"), "/foo"); } + + /// Regression for #251: a literal `!` in the middle of a filename must + /// not classify the path as a glob pattern. + #[test] + fn regression_has_glob_magic_treats_only_leading_bang_as_magic() { + assert!(has_glob_magic("!foo/**")); + assert!(!has_glob_magic("/home/user/!important/file.txt")); + assert!(!has_glob_magic("name!suffix")); + assert!(!has_glob_magic("a!b")); + } } From a8c5d4aa418de31f7a63584d5f0d5d8791c87e11 Mon Sep 17 00:00:00 2001 From: SAY-5 Date: Wed, 29 Apr 2026 19:27:12 -0700 Subject: [PATCH 12/45] cleanup(cleanup): drop unused _include_reserved_workspace_content_roots arg Closes #208. \`collect_workspace_reserved_rules\` took a third parameter \`_include_reserved_workspace_content_roots: bool\` whose underscore prefix advertised the lie: the body never read it, both call sites just passed a literal \`true\`, and the only test for it (\`include_reserved_workspace_content_roots_is_inert\`) asserted that toggling the value didn't change the output. Dead surface area. Drop the parameter from the signature, the production call site (\`create_guard\`), and the test that probed it. The replacement test would have to assert "the function still returns rules", which is already covered by the 29 other \`policy::cleanup\` tests that exercise the guard end-to-end. \`cargo test --lib policy::cleanup\` is green at 30/30. (cherry picked from commit 71ec4c771955a1bc52bc59b3033100c1f47ea79e) --- sdk/src/policy/cleanup.rs | 21 ++++----------------- 1 file changed, 4 insertions(+), 17 deletions(-) diff --git a/sdk/src/policy/cleanup.rs b/sdk/src/policy/cleanup.rs index 3c0c088c..601ab2bd 100644 --- a/sdk/src/policy/cleanup.rs +++ b/sdk/src/policy/cleanup.rs @@ -775,7 +775,6 @@ fn collect_built_in_dangerous_path_rules() -> Vec { fn collect_workspace_reserved_rules( workspace_dir: &str, project_roots: &[String], - _include_reserved_workspace_content_roots: bool, ) -> Vec { let workspace_dir = path_to_string(&resolve_absolute_path(workspace_dir)); let aindex_dir = path_to_string(&config::resolve_workspace_aindex_dir(&workspace_dir)); @@ -825,7 +824,6 @@ fn create_guard( all_rules.extend(collect_workspace_reserved_rules( &snapshot.workspace_dir, &snapshot.project_roots, - true, )); if let Some(aindex_dir) = snapshot.aindex_dir.as_ref() { @@ -2090,21 +2088,10 @@ mod tests { } } - #[test] - fn include_reserved_workspace_content_roots_is_inert() { - let temp_dir = tempdir().unwrap(); - let workspace_dir = temp_dir.path().join("workspace"); - let aindex_dir = workspace_dir.join("aindex"); - fs::create_dir_all(&aindex_dir).unwrap(); - - let rules_with_content = - collect_workspace_reserved_rules(&path_to_string(&workspace_dir), &[], true); - let rules_without_content = - collect_workspace_reserved_rules(&path_to_string(&workspace_dir), &[], false); - - assert_eq!(rules_with_content.len(), rules_without_content.len()); - assert_eq!(rules_with_content, rules_without_content); - } + // Pre-#208 the function took an `_include_reserved_workspace_content_roots` + // parameter that the body never read; the surrounding test asserted that + // it didn't change the result regardless of value. With the dead parameter + // removed the test is no longer meaningful, so it's been deleted. #[test] fn blocks_aindex_root_but_allows_deep_descendant_deletion() { From c4cd977b63fafbd0e4d2114ae23b84ab166cd162 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Thu, 30 Apr 2026 12:19:31 +0800 Subject: [PATCH 13/45] test(localized_reader): cover issue 253 grouping behavior --- sdk/src/repositories/localized_reader.rs | 44 ++++++++++++++++++++++-- 1 file changed, 41 insertions(+), 3 deletions(-) diff --git a/sdk/src/repositories/localized_reader.rs b/sdk/src/repositories/localized_reader.rs index fe2222a8..2402b7ba 100644 --- a/sdk/src/repositories/localized_reader.rs +++ b/sdk/src/repositories/localized_reader.rs @@ -1,4 +1,4 @@ -use std::collections::HashSet; +use std::collections::{HashMap, HashSet}; use std::path::Path; use crate::repositories::prompt_artifact::{PromptArtifact, read_prompt_artifact}; @@ -17,6 +17,9 @@ pub fn read_flat_files( ) -> Result, crate::CliError> { let mut entries: Vec = Vec::new(); let mut seen: HashSet = HashSet::new(); + // #253 replaces linear name lookup with an index so adding localized + // variants does not degenerate into an O(n²) walk over `entries`. + let mut by_name: HashMap = HashMap::new(); let dir_path = Path::new(dir); if dir_path.is_dir() { @@ -24,6 +27,7 @@ pub fn read_flat_files( dir_path, dir_path, &mut seen, + &mut by_name, &mut entries, global_scope_json, )?; @@ -36,6 +40,7 @@ fn scan_directory( root: &Path, current: &Path, seen: &mut HashSet, + by_name: &mut HashMap, entries: &mut Vec, global_scope_json: Option<&str>, ) -> Result<(), crate::CliError> { @@ -43,7 +48,7 @@ fn scan_directory( let entry = entry.map_err(crate::CliError::IoError)?; let path = entry.path(); if path.is_dir() { - scan_directory(root, &path, seen, entries, global_scope_json)?; + scan_directory(root, &path, seen, by_name, entries, global_scope_json)?; continue; } let Some(file_name) = path.file_name().and_then(|s| s.to_str()) else { @@ -93,7 +98,8 @@ fn scan_directory( ) .map_err(crate::CliError::ConfigError)?; - if let Some(existing) = entries.iter_mut().find(|e| e.name == full_name) { + if let Some(&idx) = by_name.get(&full_name) { + let existing = &mut entries[idx]; if is_zh_source { existing.src_zh = Some(artifact); } else if is_en_source { @@ -103,6 +109,7 @@ fn scan_directory( } } else { seen.insert(full_name.clone()); + by_name.insert(full_name.clone(), entries.len()); let mut e = FlatFileEntry { name: full_name, compiled: None, @@ -121,3 +128,34 @@ fn scan_directory( } Ok(()) } + +#[cfg(test)] +mod tests { + use super::*; + use std::fs; + use tempfile::tempdir; + + #[test] + fn regression_issue_253_read_flat_files_keeps_localized_variants_grouped() { + let temp_dir = tempdir().unwrap(); + let rules_dir = temp_dir.path().join("rules").join("nested"); + fs::create_dir_all(&rules_dir).unwrap(); + + fs::write(rules_dir.join("alpha.zh.src.mdx"), "zh source").unwrap(); + fs::write(rules_dir.join("alpha.en.src.mdx"), "en source").unwrap(); + fs::write(rules_dir.join("alpha.mdx"), "compiled").unwrap(); + + let entries = read_flat_files( + temp_dir.path().join("rules").to_str().unwrap(), + None, + ) + .unwrap(); + + assert_eq!(entries.len(), 1); + let entry = &entries[0]; + assert_eq!(entry.name, "nested/alpha"); + assert!(entry.src_zh.is_some()); + assert!(entry.src_en.is_some()); + assert!(entry.compiled.is_some()); + } +} From d69f2ecf57435678b6280bae021e6a42716edb36 Mon Sep 17 00:00:00 2001 From: SAY-5 Date: Wed, 29 Apr 2026 19:55:43 -0700 Subject: [PATCH 14/45] refactor(repositories): hoist assert_no_residual_module_syntax to prompt_artifact MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #198. \`assert_no_residual_module_syntax\` was identically duplicated in \`skill.rs\` and \`project_prompt.rs\`. Both bodies walked the same \`code_fence_pattern\` / \`residual_patterns\` regex setup and built the same error message — any bug fix had to be applied in two places. Hoist the function into \`prompt_artifact.rs\` (which both consumers already import from for \`read_prompt_artifact\`) and re-export it. Drop both copies; \`use ::{assert_no_residual_module_syntax, read_prompt_artifact}\` covers the call sites. Test coverage moves with the helper — five new tests in \`prompt_artifact::tests\` lock in the contract: - clean markdown passes - \`export default\` is rejected with file:line in the message - \`import\` is rejected with file:line - fenced code (\`\`\`js / \`\`\`) examples are skipped (legitimately quoted JS, not residual module syntax) - tilde fences (\`~~~ts\`) are also skipped `cargo test --lib repositories` is green at 50/50 (45 prior + 5 new prompt_artifact tests). (cherry picked from commit c66cdd764d86972c2bee99ceea138074fcafc70f) --- sdk/src/repositories/project_prompt.rs | 37 +---------- sdk/src/repositories/prompt_artifact.rs | 81 ++++++++++++++++++++++++- sdk/src/repositories/skill.rs | 37 +---------- 3 files changed, 82 insertions(+), 73 deletions(-) diff --git a/sdk/src/repositories/project_prompt.rs b/sdk/src/repositories/project_prompt.rs index 6868387d..a337e389 100644 --- a/sdk/src/repositories/project_prompt.rs +++ b/sdk/src/repositories/project_prompt.rs @@ -8,7 +8,7 @@ use crate::domain::plugin_shared::{ FilePathKind, Project, ProjectChildrenMemoryPrompt, ProjectRootMemoryPrompt, PromptKind, RelativePath, RootPath, Workspace, }; -use crate::repositories::prompt_artifact::read_prompt_artifact; +use crate::repositories::prompt_artifact::{assert_no_residual_module_syntax, read_prompt_artifact}; #[derive(Debug, Clone, Default, Deserialize)] #[serde(rename_all = "camelCase")] @@ -24,41 +24,6 @@ const SERIES_NAMES: &[&str] = config::DEFAULT_PROJECT_SERIES; const PROJECT_MEMORY_FILE: &str = "agt.mdx"; const SCAN_SKIP_DIRECTORIES: &[&str] = &["node_modules", ".git"]; -fn assert_no_residual_module_syntax(content: &str, file_path: &str) -> Result<(), String> { - let code_fence_pattern = regex_lite::Regex::new(r"^\s*(```|~~~)").unwrap(); - let residual_patterns = [ - regex_lite::Regex::new(r"^\s*export\s+default\b").unwrap(), - regex_lite::Regex::new(r"^\s*export\s+const\b").unwrap(), - regex_lite::Regex::new(r"^\s*import\b").unwrap(), - ]; - let mut active_fence: Option<&str> = None; - for (index, line) in content.lines().enumerate() { - if let Some(caps) = code_fence_pattern.captures(line) { - let marker = caps.get(1).map(|m| m.as_str()).unwrap_or(""); - if active_fence.is_none() { - active_fence = Some(marker); - } else if active_fence == Some(marker) { - active_fence = None; - } - continue; - } - if active_fence.is_some() { - continue; - } - for pat in &residual_patterns { - if pat.is_match(line) { - return Err(format!( - "Compiled prompt still contains residual module syntax at {}:{}: {}", - file_path, - index + 1, - line.trim() - )); - } - } - } - Ok(()) -} - fn extract_front_matter(raw_mdx: &str) -> (Option, Option) { let front_matter_regex = regex_lite::Regex::new(r"(?s)^---\r?\n(.*?)\r?\n---(?:(?:\r?\n){1,2}|$)").ok(); diff --git a/sdk/src/repositories/prompt_artifact.rs b/sdk/src/repositories/prompt_artifact.rs index e3776078..74b453f6 100644 --- a/sdk/src/repositories/prompt_artifact.rs +++ b/sdk/src/repositories/prompt_artifact.rs @@ -105,9 +105,53 @@ pub fn read_prompt_artifact( } } +/// Walk a compiled prompt's text and reject any leftover ESM-style +/// module syntax (`export default`, `export const`, `import`) that +/// should have been stripped by `mdx_to_md_with_metadata`. Lines +/// inside fenced code blocks (``` / ~~~) are skipped so legitimately +/// quoted JS examples don't trip the check. +/// +/// Pre-#198 the same body was duplicated in both `skill.rs` and +/// `project_prompt.rs`; bug fixes had to be applied in lockstep. +/// Centralising here makes the assertion the single source of truth. +pub fn assert_no_residual_module_syntax(content: &str, file_path: &str) -> Result<(), String> { + let code_fence_pattern = regex_lite::Regex::new(r"^\s*(```|~~~)").unwrap(); + let residual_patterns = [ + regex_lite::Regex::new(r"^\s*export\s+default\b").unwrap(), + regex_lite::Regex::new(r"^\s*export\s+const\b").unwrap(), + regex_lite::Regex::new(r"^\s*import\b").unwrap(), + ]; + let mut active_fence: Option<&str> = None; + for (index, line) in content.lines().enumerate() { + if let Some(caps) = code_fence_pattern.captures(line) { + let marker = caps.get(1).map(|m| m.as_str()).unwrap_or(""); + if active_fence.is_none() { + active_fence = Some(marker); + } else if active_fence == Some(marker) { + active_fence = None; + } + continue; + } + if active_fence.is_some() { + continue; + } + for pat in &residual_patterns { + if pat.is_match(line) { + return Err(format!( + "Compiled prompt still contains residual module syntax at {}:{}: {}", + file_path, + index + 1, + line.trim() + )); + } + } + } + Ok(()) +} + #[cfg(test)] mod tests { - use super::should_compile_dist_artifact; + use super::{assert_no_residual_module_syntax, should_compile_dist_artifact}; #[test] fn plain_markdown_dist_prompt_skips_compilation() { @@ -124,4 +168,39 @@ mod tests { assert!(should_compile_dist_artifact("Platform: {os.platform}\n")); assert!(should_compile_dist_artifact("ok\n")); } + + #[test] + fn assert_no_residual_module_syntax_passes_clean_markdown() { + let body = "# Title\n\nA paragraph.\n"; + assert!(assert_no_residual_module_syntax(body, "ok.md").is_ok()); + } + + #[test] + fn assert_no_residual_module_syntax_rejects_export_default() { + let body = "# Title\nexport default { scope: 'project' }\n"; + let err = assert_no_residual_module_syntax(body, "bad.md").unwrap_err(); + assert!(err.contains("residual module syntax")); + assert!(err.contains("bad.md:2")); + } + + #[test] + fn assert_no_residual_module_syntax_rejects_import() { + let body = "import x from 'y'\n# Title\n"; + let err = assert_no_residual_module_syntax(body, "bad.md").unwrap_err(); + assert!(err.contains("bad.md:1")); + } + + #[test] + fn assert_no_residual_module_syntax_skips_code_fences() { + // `import …` inside a fenced JS example is documentation, not + // residual module syntax. + let body = "# Title\n\n```js\nimport { foo } from 'bar';\n```\n"; + assert!(assert_no_residual_module_syntax(body, "ok.md").is_ok()); + } + + #[test] + fn assert_no_residual_module_syntax_skips_tilde_fences() { + let body = "# Title\n\n~~~ts\nexport default {}\n~~~\n"; + assert!(assert_no_residual_module_syntax(body, "ok.md").is_ok()); + } } diff --git a/sdk/src/repositories/skill.rs b/sdk/src/repositories/skill.rs index 672cde19..3e939de0 100644 --- a/sdk/src/repositories/skill.rs +++ b/sdk/src/repositories/skill.rs @@ -9,7 +9,7 @@ use crate::domain::plugin_shared::{ FilePathKind, McpServerConfig, PromptKind, RelativePath, SkillChildDoc, SkillMcpConfig, SkillPrompt, SkillResource, SkillResourceEncoding, SkillYAMLFrontMatter, }; -use crate::repositories::prompt_artifact::read_prompt_artifact; +use crate::repositories::prompt_artifact::{assert_no_residual_module_syntax, read_prompt_artifact}; #[derive(Debug, Clone, Default, Deserialize)] #[serde(rename_all = "camelCase")] @@ -38,41 +38,6 @@ fn transform_mdx_references_to_md(content: &str) -> String { .into_owned() } -fn assert_no_residual_module_syntax(content: &str, file_path: &str) -> Result<(), String> { - let code_fence_pattern = regex_lite::Regex::new(r"^\s*(```|~~~)").unwrap(); - let residual_patterns = [ - regex_lite::Regex::new(r"^\s*export\s+default\b").unwrap(), - regex_lite::Regex::new(r"^\s*export\s+const\b").unwrap(), - regex_lite::Regex::new(r"^\s*import\b").unwrap(), - ]; - let mut active_fence: Option<&str> = None; - for (index, line) in content.lines().enumerate() { - if let Some(caps) = code_fence_pattern.captures(line) { - let marker = caps.get(1).map(|m| m.as_str()).unwrap_or(""); - if active_fence.is_none() { - active_fence = Some(marker); - } else if active_fence == Some(marker) { - active_fence = None; - } - continue; - } - if active_fence.is_some() { - continue; - } - for pat in &residual_patterns { - if pat.is_match(line) { - return Err(format!( - "Compiled prompt still contains residual module syntax at {}:{}: {}", - file_path, - index + 1, - line.trim() - )); - } - } - } - Ok(()) -} - fn extract_front_matter(raw_mdx: &str) -> (Option, Option) { let front_matter_regex = regex_lite::Regex::new(r"(?s)^---\r?\n(.*?)\r?\n---(?:(?:\r?\n){1,2}|$)").ok(); From 37b7ecf3888e29c8de8a27721aa9eb6c4362f5ad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Thu, 30 Apr 2026 12:28:56 +0800 Subject: [PATCH 15/45] docs(prompt_artifact): attribute helper hoist to issue 254 --- sdk/src/repositories/prompt_artifact.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/sdk/src/repositories/prompt_artifact.rs b/sdk/src/repositories/prompt_artifact.rs index 74b453f6..ea6b7c91 100644 --- a/sdk/src/repositories/prompt_artifact.rs +++ b/sdk/src/repositories/prompt_artifact.rs @@ -111,9 +111,9 @@ pub fn read_prompt_artifact( /// inside fenced code blocks (``` / ~~~) are skipped so legitimately /// quoted JS examples don't trip the check. /// -/// Pre-#198 the same body was duplicated in both `skill.rs` and -/// `project_prompt.rs`; bug fixes had to be applied in lockstep. -/// Centralising here makes the assertion the single source of truth. +/// `#254` centralizes the helper that used to be duplicated in both +/// `skill.rs` and `project_prompt.rs`, so residual-module checks now +/// have a single in-tree source of truth. pub fn assert_no_residual_module_syntax(content: &str, file_path: &str) -> Result<(), String> { let code_fence_pattern = regex_lite::Regex::new(r"^\s*(```|~~~)").unwrap(); let residual_patterns = [ From 20d5826b5b25fd0142fa75d4bece3471fcfa664a Mon Sep 17 00:00:00 2001 From: SAY-5 Date: Wed, 29 Apr 2026 20:13:07 -0700 Subject: [PATCH 16/45] refactor(repositories): hoist detect_project_name_conflicts to series_conflict MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #199. \`detect_project_name_conflicts\` was near-identically implemented in both \`readme.rs\` (lines 33-81) and \`aindex_resolvers.rs\` (lines 29-77). The only meaningful difference between the two was the error-message prefix — \`"Readme project series name conflict:"\` vs \`"Aindex project series name conflict:"\`. Hoist the body to a new \`repositories::series_conflict\` module and replace both copies with thin shims that pass their respective prefix. The new function is generic over \`AsRef\` so callers can hand it either \`Vec<&str>\` or \`Vec\` without an extra clone. Module surface: - \`detect_project_name_conflicts(aindex_dir, series_names, error_prefix)\` walks every immediate subdirectory of each \`aindex_dir/\`, groups the basenames by which series they appeared in, and returns \`Err(format!("{error_prefix}: {sorted_names_csv}"))\` if any name appeared in more than one series. Identical observable behaviour to the two former bodies. Both \`readme.rs\` and \`aindex_resolvers.rs\` lose the unused \`use std::collections::HashMap;\` import that came with the duplicated body. Tests in the new module pin the contract — five cases: - returns Ok when no overlap (zh:projA, en:projB) - detects a single shared name across two series - sorts multiple conflicts alphabetically before joining - ignores files at the series root (e.g. \`zh/notes.md\` is not a project) - missing series dirs are skipped, not treated as conflicts \`cargo test --lib repositories\` is green at 55/55 (50 prior + 5 new series_conflict tests). (cherry picked from commit f297bb9becd49e13e2319c00b6a9f9ea1615676c) --- sdk/src/repositories/aindex_resolvers.rs | 51 +-------- sdk/src/repositories/mod.rs | 1 + sdk/src/repositories/readme.rs | 51 +-------- sdk/src/repositories/series_conflict.rs | 134 +++++++++++++++++++++++ 4 files changed, 147 insertions(+), 90 deletions(-) create mode 100644 sdk/src/repositories/series_conflict.rs diff --git a/sdk/src/repositories/aindex_resolvers.rs b/sdk/src/repositories/aindex_resolvers.rs index 617183f8..b8f54990 100644 --- a/sdk/src/repositories/aindex_resolvers.rs +++ b/sdk/src/repositories/aindex_resolvers.rs @@ -1,4 +1,3 @@ -use std::collections::HashMap; use std::path::Path; use serde::Deserialize; @@ -30,50 +29,12 @@ fn detect_project_name_conflicts( aindex_dir: &Path, series_configs: &[SeriesConfig], ) -> Result<(), String> { - let mut refs_by_project: HashMap> = HashMap::new(); - - for series in series_configs { - let series_src_dir = aindex_dir.join(series.name); - if !series_src_dir.is_dir() { - continue; - } - - let entries = match std::fs::read_dir(&series_src_dir) { - Ok(e) => e, - Err(_) => continue, - }; - - for entry in entries.flatten() { - if !entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false) { - continue; - } - let project_name = entry.file_name().to_string_lossy().into_owned(); - refs_by_project - .entry(project_name) - .or_default() - .push(series.name.to_string()); - } - } - - let conflicts: Vec = refs_by_project - .into_iter() - .filter(|(_, series_names)| { - let unique: std::collections::HashSet<_> = series_names.iter().collect(); - unique.len() > 1 - }) - .map(|(project_name, _)| project_name) - .collect(); - - if conflicts.is_empty() { - Ok(()) - } else { - let mut conflicts_sorted = conflicts; - conflicts_sorted.sort(); - Err(format!( - "Aindex project series name conflict: {}", - conflicts_sorted.join(", ") - )) - } + let series_names: Vec<&str> = series_configs.iter().map(|s| s.name).collect(); + crate::repositories::series_conflict::detect_project_name_conflicts( + aindex_dir, + &series_names, + "Aindex project series name conflict", + ) } fn load_project_config(project_name: &str, config_path: &Path) -> Result, String> { diff --git a/sdk/src/repositories/mod.rs b/sdk/src/repositories/mod.rs index 423a59f3..82ec7055 100644 --- a/sdk/src/repositories/mod.rs +++ b/sdk/src/repositories/mod.rs @@ -21,6 +21,7 @@ pub mod prompt_artifact; pub mod public_config; pub mod readme; pub mod rule; +pub mod series_conflict; pub mod shared_ignore; pub mod skill; pub mod subagent; diff --git a/sdk/src/repositories/readme.rs b/sdk/src/repositories/readme.rs index 42a8d993..621afe52 100644 --- a/sdk/src/repositories/readme.rs +++ b/sdk/src/repositories/readme.rs @@ -1,4 +1,3 @@ -use std::collections::HashMap; use std::path::Path; use serde::Deserialize; @@ -34,50 +33,12 @@ fn detect_project_name_conflicts( aindex_dir: &Path, series_configs: &[SeriesConfig], ) -> Result<(), String> { - let mut refs_by_project: HashMap> = HashMap::new(); - - for series in series_configs { - let series_src_dir = aindex_dir.join(series.name); - if !series_src_dir.is_dir() { - continue; - } - - let entries = match std::fs::read_dir(&series_src_dir) { - Ok(e) => e, - Err(_) => continue, - }; - - for entry in entries.flatten() { - if !entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false) { - continue; - } - let project_name = entry.file_name().to_string_lossy().into_owned(); - refs_by_project - .entry(project_name) - .or_default() - .push(series.name.to_string()); - } - } - - let conflicts: Vec = refs_by_project - .into_iter() - .filter(|(_, series_names)| { - let unique: std::collections::HashSet<_> = series_names.iter().collect(); - unique.len() > 1 - }) - .map(|(project_name, _)| project_name) - .collect(); - - if conflicts.is_empty() { - Ok(()) - } else { - let mut conflicts_sorted = conflicts; - conflicts_sorted.sort(); - Err(format!( - "Readme project series name conflict: {}", - conflicts_sorted.join(", ") - )) - } + let series_names: Vec<&str> = series_configs.iter().map(|s| s.name).collect(); + crate::repositories::series_conflict::detect_project_name_conflicts( + aindex_dir, + &series_names, + "Readme project series name conflict", + ) } fn collect_readme_files_recursive( diff --git a/sdk/src/repositories/series_conflict.rs b/sdk/src/repositories/series_conflict.rs new file mode 100644 index 00000000..741054e9 --- /dev/null +++ b/sdk/src/repositories/series_conflict.rs @@ -0,0 +1,134 @@ +//! Shared helper for detecting project-name collisions across the series +//! directories (`zh`, `en`, …) under a single `aindex/` root. +//! +//! Pre-#199 the same body was implemented in both +//! [`aindex_resolvers`](super::aindex_resolvers) and +//! [`readme`](super::readme); the only difference between the two +//! copies was the error-message prefix (`"Aindex project …"` vs +//! `"Readme project …"`). Centralising here makes the contract one +//! place instead of two and lets either consumer pass its own prefix. + +use std::collections::{HashMap, HashSet}; +use std::path::Path; + +/// Walk every immediate subdirectory under `aindex_dir/` for +/// each `series` in `series_names`, group the project-name basenames +/// by which series each one appeared in, and return an error listing +/// any project name that showed up under more than one series. +/// +/// `error_prefix` is plain text and is prepended verbatim to the +/// joined list of conflicting names — typical values are +/// `"Aindex project series name conflict"` or +/// `"Readme project series name conflict"`. +pub fn detect_project_name_conflicts>( + aindex_dir: &Path, + series_names: &[S], + error_prefix: &str, +) -> Result<(), String> { + let mut refs_by_project: HashMap> = HashMap::new(); + + for series in series_names { + let series_name = series.as_ref(); + let series_src_dir = aindex_dir.join(series_name); + if !series_src_dir.is_dir() { + continue; + } + + let entries = match std::fs::read_dir(&series_src_dir) { + Ok(e) => e, + Err(_) => continue, + }; + + for entry in entries.flatten() { + if !entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false) { + continue; + } + let project_name = entry.file_name().to_string_lossy().into_owned(); + refs_by_project + .entry(project_name) + .or_default() + .push(series_name.to_string()); + } + } + + let conflicts: Vec = refs_by_project + .into_iter() + .filter(|(_, series_names)| { + let unique: HashSet<_> = series_names.iter().collect(); + unique.len() > 1 + }) + .map(|(project_name, _)| project_name) + .collect(); + + if conflicts.is_empty() { + Ok(()) + } else { + let mut conflicts_sorted = conflicts; + conflicts_sorted.sort(); + Err(format!("{}: {}", error_prefix, conflicts_sorted.join(", "))) + } +} + +#[cfg(test)] +mod tests { + use super::detect_project_name_conflicts; + use std::fs; + use tempfile::tempdir; + + #[test] + fn returns_ok_when_no_conflicts() { + let dir = tempdir().unwrap(); + fs::create_dir_all(dir.path().join("zh/projA")).unwrap(); + fs::create_dir_all(dir.path().join("en/projB")).unwrap(); + + let result = detect_project_name_conflicts(dir.path(), &["zh", "en"], "Aindex"); + assert!(result.is_ok(), "no overlap should be Ok, got {:?}", result); + } + + #[test] + fn detects_single_conflict() { + let dir = tempdir().unwrap(); + fs::create_dir_all(dir.path().join("zh/shared")).unwrap(); + fs::create_dir_all(dir.path().join("en/shared")).unwrap(); + + let err = detect_project_name_conflicts(dir.path(), &["zh", "en"], "Test prefix").unwrap_err(); + assert!(err.starts_with("Test prefix: ")); + assert!(err.contains("shared")); + } + + #[test] + fn sorts_multiple_conflicts() { + let dir = tempdir().unwrap(); + fs::create_dir_all(dir.path().join("zh/banana")).unwrap(); + fs::create_dir_all(dir.path().join("en/banana")).unwrap(); + fs::create_dir_all(dir.path().join("zh/apple")).unwrap(); + fs::create_dir_all(dir.path().join("en/apple")).unwrap(); + + let err = detect_project_name_conflicts(dir.path(), &["zh", "en"], "Prefix").unwrap_err(); + assert_eq!(err, "Prefix: apple, banana"); + } + + #[test] + fn ignores_files_among_project_dirs() { + let dir = tempdir().unwrap(); + fs::create_dir_all(dir.path().join("zh")).unwrap(); + fs::create_dir_all(dir.path().join("en")).unwrap(); + // `notes.md` at the series root is not a project — it must be + // ignored, not produce a phantom "notes.md" key. + fs::write(dir.path().join("zh/notes.md"), b"x").unwrap(); + fs::write(dir.path().join("en/notes.md"), b"x").unwrap(); + + let result = detect_project_name_conflicts(dir.path(), &["zh", "en"], "Prefix"); + assert!(result.is_ok(), "files at the series root must be ignored, got {:?}", result); + } + + #[test] + fn missing_series_dir_is_skipped() { + let dir = tempdir().unwrap(); + fs::create_dir_all(dir.path().join("zh/onlyZh")).unwrap(); + // No "en" directory at all. + + let result = detect_project_name_conflicts(dir.path(), &["zh", "en"], "Prefix"); + assert!(result.is_ok(), "missing series dir is not a conflict, got {:?}", result); + } +} From 2a63b1e75b668ed3ee334c250888c264dc58d3b6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Thu, 30 Apr 2026 12:33:14 +0800 Subject: [PATCH 17/45] docs(series_conflict): attribute helper hoist to issue 255 --- sdk/src/repositories/series_conflict.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sdk/src/repositories/series_conflict.rs b/sdk/src/repositories/series_conflict.rs index 741054e9..735010b2 100644 --- a/sdk/src/repositories/series_conflict.rs +++ b/sdk/src/repositories/series_conflict.rs @@ -1,9 +1,9 @@ //! Shared helper for detecting project-name collisions across the series //! directories (`zh`, `en`, …) under a single `aindex/` root. //! -//! Pre-#199 the same body was implemented in both +//! `#255` provides the shared helper that used to be duplicated in //! [`aindex_resolvers`](super::aindex_resolvers) and -//! [`readme`](super::readme); the only difference between the two +//! [`readme`](super::readme). The only difference between the former //! copies was the error-message prefix (`"Aindex project …"` vs //! `"Readme project …"`). Centralising here makes the contract one //! place instead of two and lets either consumer pass its own prefix. From d5ed8999bed3417248b4e26a3df2b7dde7490df3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Apr 2026 01:16:07 +0000 Subject: [PATCH 18/45] chore(deps): bump the cargo-workspace group with 4 updates Bumps the cargo-workspace group with 4 updates: [clap](https://github.com/clap-rs/clap), [deno_core](https://github.com/denoland/deno), [tokio](https://github.com/tokio-rs/tokio) and [tar](https://github.com/alexcrichton/tar-rs). Updates `clap` from 4.6.0 to 4.6.1 - [Release notes](https://github.com/clap-rs/clap/releases) - [Changelog](https://github.com/clap-rs/clap/blob/master/CHANGELOG.md) - [Commits](https://github.com/clap-rs/clap/compare/clap_complete-v4.6.0...clap_complete-v4.6.1) Updates `deno_core` from 0.398.0 to 0.399.0 - [Release notes](https://github.com/denoland/deno/releases) - [Changelog](https://github.com/denoland/deno/blob/main/Releases.md) - [Commits](https://github.com/denoland/deno/commits) Updates `tokio` from 1.49.0 to 1.50.0 - [Release notes](https://github.com/tokio-rs/tokio/releases) - [Commits](https://github.com/tokio-rs/tokio/compare/tokio-1.49.0...tokio-1.50.0) Updates `tar` from 0.4.44 to 0.4.45 - [Commits](https://github.com/alexcrichton/tar-rs/compare/0.4.44...0.4.45) --- updated-dependencies: - dependency-name: clap dependency-version: 4.6.1 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: cargo-workspace - dependency-name: deno_core dependency-version: 0.399.0 dependency-type: direct:production update-type: version-update:semver-minor dependency-group: cargo-workspace - dependency-name: tokio dependency-version: 1.50.0 dependency-type: direct:production update-type: version-update:semver-minor dependency-group: cargo-workspace - dependency-name: tar dependency-version: 0.4.45 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: cargo-workspace ... Signed-off-by: dependabot[bot] (cherry picked from commit 55426f097dea1ab80c6c9382e04212b3e3414ba5) --- Cargo.lock | 28 ++++++++++++++-------------- Cargo.toml | 2 +- sdk/Cargo.toml | 4 ++-- 3 files changed, 17 insertions(+), 17 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 41ac9f74..7abdfb95 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -793,9 +793,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.6.0" +version = "4.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b193af5b67834b676abd72466a96c1024e6a6ad978a1f484bd90b85c94041351" +checksum = "1ddb117e43bbf7dacf0a4190fef4d345b9bad68dfc649cb349e7d17d28428e51" dependencies = [ "clap_builder", "clap_derive", @@ -815,9 +815,9 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.6.0" +version = "4.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1110bd8a634a1ab8cb04345d8d878267d57c3cf1b38d91b71af6686408bbca6a" +checksum = "f2ce8604710f6733aa641a2b3731eaa1e8b3d9973d5e3565da11800813f997a9" dependencies = [ "heck 0.5.0", "proc-macro2", @@ -1145,9 +1145,9 @@ dependencies = [ [[package]] name = "deno_core" -version = "0.398.0" +version = "0.399.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b75cca01372f2c47ef715908c68019d07f15bce3aad4a420444c16069a4e0791" +checksum = "2c1dbe504b49d3df8182331ff4c8cc80a5aec65d2e7c190ee9d82b7192d0eceb" dependencies = [ "anyhow", "az", @@ -1229,9 +1229,9 @@ dependencies = [ [[package]] name = "deno_ops" -version = "0.274.0" +version = "0.275.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9560662cdbffa3428ebac40f3ea5fb46ef8b557c41b04f62af045e56defd487" +checksum = "c5c212331778be520abc506a08319c48dfc870afa5543fdd9d4280b29c2820f4" dependencies = [ "indexmap 2.13.0", "proc-macro2", @@ -4770,9 +4770,9 @@ dependencies = [ [[package]] name = "serde_v8" -version = "0.307.0" +version = "0.308.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5804c1a650326aed05a374a062d4bd9c169bde371330b3e82659ac3ed8463ecd" +checksum = "1a997ab31931989dde87abe6efcf865450690e8fec099a5a2f9b733ca33d08b2" dependencies = [ "deno_error", "num-bigint", @@ -5705,9 +5705,9 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tar" -version = "0.4.44" +version = "0.4.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d863878d212c87a19c1a610eb53bb01fe12951c0501cf5a0d65f724914a667a" +checksum = "22692a6476a21fa75fdfc11d452fda482af402c008cdbaf3476414e122040973" dependencies = [ "filetime", "libc", @@ -6311,9 +6311,9 @@ dependencies = [ [[package]] name = "tokio" -version = "1.49.0" +version = "1.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72a2903cd7736441aac9df9d7688bd0ce48edccaadf181c3b90be801e81d3d86" +checksum = "27ad5e34374e03cfffefc301becb44e9dc3c17584f414349ebe29ed26661822d" dependencies = [ "bytes", "libc", diff --git a/Cargo.toml b/Cargo.toml index 1cdc7c35..43b6f84d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -46,7 +46,7 @@ serde_json = { version = "1.0.149", features = ["preserve_order"] } serde_yml = "0.0.12" # CLI -clap = { version = "4.6.0", features = ["derive"] } +clap = { version = "4.6.1", features = ["derive"] } # Filesystem & system dirs = "6.0.0" diff --git a/sdk/Cargo.toml b/sdk/Cargo.toml index cc9b63f2..6ab7ff0e 100644 --- a/sdk/Cargo.toml +++ b/sdk/Cargo.toml @@ -34,9 +34,9 @@ markdown = { workspace = true } tempfile = "3.27.0" schemars = "1" deno_ast = { version = "0.53.1", features = ["emit", "transpiling"] } -deno_core = "0.398.0" +deno_core = "0.399.0" deno_error = "0.7.1" -tokio = { version = "1.48.0", features = ["rt-multi-thread", "macros"] } +tokio = { version = "1.50.0", features = ["rt-multi-thread", "macros"] } [dev-dependencies] proptest = "1.11.0" From 372e1aabaabadd5532b80fc635cd658a2971e7a6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Thu, 30 Apr 2026 16:00:29 +0800 Subject: [PATCH 19/45] style: cargo fmt fixes across workspace --- bug-pattern-report.json | 290 ++++++++++++++++++ bug_report.json | 56 ++++ cli/local-tests/src/lib.rs | 14 +- .../tests/opencode_agent_mode_validation.rs | 4 +- cli/local-tests/tests/trae_smoke.rs | 31 +- sdk/src/domain/mod.rs | 6 +- sdk/src/domain/output_context.rs | 4 +- .../output_plans/claude_code_output_plan.rs | 5 +- .../domain/output_plans/droid_output_plan.rs | 8 +- .../domain/output_plans/kiro_output_plan.rs | 5 +- .../output_plans/opencode_output_plan.rs | 5 +- .../domain/output_plans/trae_output_plan.rs | 16 +- sdk/src/infra/deno_runtime.rs | 10 +- sdk/src/policy/md_cleanup.rs | 4 +- sdk/src/repositories/command.rs | 14 +- sdk/src/repositories/localized_reader.rs | 6 +- sdk/src/repositories/project_prompt.rs | 4 +- sdk/src/repositories/rule.rs | 12 +- sdk/src/repositories/series_conflict.rs | 12 +- sdk/src/repositories/skill.rs | 4 +- sdk/src/repositories/subagent.rs | 12 +- sdk/src/services/clean_service.rs | 4 +- sdk/src/services/install_service.rs | 7 +- xtask/src/main.rs | 18 +- 24 files changed, 468 insertions(+), 83 deletions(-) create mode 100644 bug-pattern-report.json create mode 100644 bug_report.json diff --git a/bug-pattern-report.json b/bug-pattern-report.json new file mode 100644 index 00000000..11884614 --- /dev/null +++ b/bug-pattern-report.json @@ -0,0 +1,290 @@ +[ + { + "pattern": "unwrap_in_production", + "file": "sdk/src/repositories/project_prompt.rs", + "line": 28, + "code": "regex_lite::Regex::new(r\"^\\s*(```|~~~)\").unwrap()", + "is_bug": false, + "note": "Static regex compilation; panics only on invalid regex which is a compile-time constant" + }, + { + "pattern": "unwrap_in_production", + "file": "sdk/src/repositories/project_prompt.rs", + "line": 30, + "code": "regex_lite::Regex::new(r\"^\\s*export\\s+default\\b\").unwrap()", + "is_bug": false, + "note": "Static regex; compile-time constant, safe" + }, + { + "pattern": "unwrap_in_production", + "file": "sdk/src/repositories/project_prompt.rs", + "line": 31, + "code": "regex_lite::Regex::new(r\"^\\s*export\\s+const\\b\").unwrap()", + "is_bug": false, + "note": "Static regex; compile-time constant, safe" + }, + { + "pattern": "unwrap_in_production", + "file": "sdk/src/repositories/project_prompt.rs", + "line": 32, + "code": "regex_lite::Regex::new(r\"^\\s*import\\b\").unwrap()", + "is_bug": false, + "note": "Static regex; compile-time constant, safe" + }, + { + "pattern": "unwrap_in_production", + "file": "sdk/src/repositories/project_prompt.rs", + "line": 302, + "code": "project.project_type.clone().unwrap()", + "is_bug": true, + "note": "PANIC RISK: Guards is_some() on line 301 but uses unwrap on clone instead of unwrap_or/expect with context. If logic changes, this panics without a meaningful message. Should use expect() or unwrap_or_default()." + }, + { + "pattern": "unwrap_in_production", + "file": "sdk/src/repositories/project_prompt.rs", + "line": 319, + "code": "matching_series.unwrap()", + "is_bug": true, + "note": "PANIC RISK: Guards is_none() on line 314 with continue, but unwrap has no error message. Should use expect() for better diagnostics if the guard ever changes." + }, + { + "pattern": "expect_in_production", + "file": "sdk/src/infra/logger/sink.rs", + "line": 113, + "code": ".expect(\"failed to spawn logger output worker\")", + "is_bug": false, + "note": "Thread spawn failure; acceptable - application cannot function without logger" + }, + { + "pattern": "allow_dead_code", + "file": "cli/src/cli.rs", + "line": 105, + "code": "#[allow(dead_code)] pub fn as_str(self) -> &'static str", + "is_bug": true, + "note": "Dead code: as_str() on ResolvedLogLevel enum is unused in production. Could be removed or used. Signals possible incomplete feature integration." + }, + { + "pattern": "allow_dead_code", + "file": "sdk/src/services/dry_run_service.rs", + "line": 17, + "code": "#[allow(dead_code)] struct PlannedOutputFile", + "is_bug": true, + "note": "Dead code: PlannedOutputFile struct fields (path, content, encoding) are never read in production. Incomplete dry-run output implementation." + }, + { + "pattern": "allow_dead_code", + "file": "sdk/src/infra/script_runtime.rs", + "line": 220, + "code": "#[allow(dead_code)] worker_path: Option", + "is_bug": true, + "note": "Dead code: ResolvePublicPathContext.worker_path is deserialized but never read. Unused field in production." + }, + { + "pattern": "allow_dead_code", + "file": "sdk/src/infra/script_runtime.rs", + "line": 222, + "code": "#[allow(dead_code)] timeout_ms: Option", + "is_bug": true, + "note": "Dead code: ResolvePublicPathContext.timeout_ms is deserialized but never read. Unused field in production." + }, + { + "pattern": "todo_comment", + "file": "gui/src-tauri/src/tray.rs", + "line": 60, + "code": "// TODO: Trigger pipeline execution via sidecar once the full IPC wiring is in place.", + "is_bug": true, + "note": "Incomplete feature: Tray 'install' action only shows/focuses window instead of triggering pipeline. Unfinished IPC wiring." + }, + { + "pattern": "println_in_production", + "file": "cli/src/bin/tnmsc-test-api.rs", + "line": 40, + "code": "println!(\"{output}\")", + "is_bug": false, + "note": "Test API binary, println acceptable for CLI output" + }, + { + "pattern": "println_in_production", + "file": "cli/src/bin/tnmsc-test-api.rs", + "line": 44, + "code": "eprintln!(\"Error: {error}\")", + "is_bug": false, + "note": "Test API binary, eprintln acceptable for error output" + }, + { + "pattern": "println_in_production", + "file": "cli/src/commands/help.rs", + "line": "4-26", + "code": "println!() (23 occurrences)", + "is_bug": false, + "note": "Help command output to stdout - println is appropriate for user-facing help text" + }, + { + "pattern": "println_in_production", + "file": "cli/src/commands/package.rs", + "line": 45, + "code": "println!(\"Hydrated {}\", path.display())", + "is_bug": true, + "note": "CLI progress output should use logger, not raw println. Inconsistent with rest of CLI." + }, + { + "pattern": "println_in_production", + "file": "cli/src/commands/package.rs", + "line": 50, + "code": "eprintln!(\"Error: {error}\")", + "is_bug": true, + "note": "Error output should use logger.error(), not raw eprintln. Inconsistent error handling." + }, + { + "pattern": "println_in_production", + "file": "cli/src/commands/pipeline.rs", + "line": 214, + "code": "println!(\"{line}\")", + "is_bug": false, + "note": "Pipeline output to stdout - intentional user-facing output" + }, + { + "pattern": "println_in_production", + "file": "cli/src/commands/pipeline.rs", + "line": 217, + "code": "eprintln!(\"{line}\")", + "is_bug": false, + "note": "Pipeline error output to stderr - intentional user-facing output" + }, + { + "pattern": "println_in_production", + "file": "cli/src/commands/version.rs", + "line": 4, + "code": "println!(\"{}\", env!(\"CARGO_PKG_VERSION\"))", + "is_bug": false, + "note": "Version output - println appropriate for CLI version command" + }, + { + "pattern": "println_in_production", + "file": "gui/src-tauri/src/lib.rs", + "line": 43, + "code": "eprintln!(\"error while running tauri application: {error}\")", + "is_bug": true, + "note": "Application startup error should use logger, not raw eprintln. Especially important for GUI where stderr may not be visible." + }, + { + "pattern": "println_in_production", + "file": "mcp/src/main.rs", + "line": 277, + "code": "eprintln!(\"JSON parse error: {}\", e)", + "is_bug": true, + "note": "MCP server error output uses eprintln instead of logger. MCP communicates via stdout, so eprintln goes to stderr which may confuse users." + }, + { + "pattern": "println_in_production", + "file": "mcp/src/commands/package.rs", + "line": 45, + "code": "println!(\"Hydrated {}\", path.display())", + "is_bug": true, + "note": "MCP package progress should use logger, not println. MCP uses stdout for JSON-RPC; println may corrupt protocol." + }, + { + "pattern": "println_in_production", + "file": "mcp/src/commands/package.rs", + "line": 50, + "code": "eprintln!(\"Error: {error}\")", + "is_bug": true, + "note": "MCP package error should use logger, not raw eprintln." + }, + { + "pattern": "println_in_production", + "file": "xtask/src/main.rs", + "line": "130-237", + "code": "println!() (22 occurrences for task progress)", + "is_bug": false, + "note": "xtask is a dev build tool, println for progress output is acceptable and standard practice" + }, + { + "pattern": "println_in_production", + "file": "sdk/build.rs", + "line": 2, + "code": "println!(\"cargo:rerun-if-changed=build.rs\")", + "is_bug": false, + "note": "Cargo build script requires println! for build directives - mandatory pattern" + }, + { + "pattern": "hardcoded_url", + "file": "mcp/src/main.rs", + "line": "84,100,114,130", + "code": "\"https://json-schema.org/draft/2020-12/schema\"", + "is_bug": false, + "note": "JSON Schema standard URL reference - constant, appropriate to hardcode" + }, + { + "pattern": "unsafe_block", + "file": "sdk/src/domain/output_plans/droid_output_plan.rs", + "line": "791-803", + "code": "unsafe { std::env::set_var(\"HOME\", ...) } (x3)", + "is_bug": false, + "note": "Test-only code (inside #[cfg(test)] module). Uses mutex-guarded env var mutation. Rust 2024 requires unsafe for set_var." + }, + { + "pattern": "unsafe_block", + "file": "sdk/src/domain/output_plans/gemini_output_plan.rs", + "line": "297-309", + "code": "unsafe { std::env::set_var(\"HOME\", ...) } (x3)", + "is_bug": false, + "note": "Test-only code. Same pattern as droid_output_plan." + }, + { + "pattern": "unsafe_block", + "file": "sdk/src/services/dry_run_service.rs", + "line": "255-265", + "code": "unsafe { std::env::set_var(...) } (x3)", + "is_bug": false, + "note": "Test-only code (inside #[cfg(test)] module)." + }, + { + "pattern": "unsafe_block", + "file": "sdk/src/services/clean_service.rs", + "line": "637-649", + "code": "unsafe { std::env::set_var(\"HOME\", ...) } (x3)", + "is_bug": false, + "note": "Test-only code. Same test helper pattern." + }, + { + "pattern": "unsafe_block", + "file": "sdk/src/infra/deno_runtime.rs", + "line": "378-398", + "code": "unsafe { std::env::remove_var/set_var(...) } (x4)", + "is_bug": false, + "note": "Test-only code (inside #[cfg(test)] module)." + }, + { + "pattern": "unsafe_block", + "file": "sdk/src/infra/logger/mod.rs", + "line": 237, + "code": "unsafe { std::env::remove_var(\"LOG_LEVEL\") }", + "is_bug": false, + "note": "Test-only code." + }, + { + "pattern": "clone_on_large_type", + "file": "sdk/src/domain/config/mod.rs", + "line": 924, + "code": "loaded.iter().map(|r| r.config.clone()).collect()", + "is_bug": false, + "note": "Clones Vec of UserConfigFile during config merging. Occurs once per pipeline run. Acceptable." + }, + { + "pattern": "clone_on_large_type", + "file": "sdk/src/domain/config/mod.rs", + "line": 970, + "code": "serde_json::from_value::(parsed.clone())", + "is_bug": false, + "note": "Clones serde_json::Value to avoid borrow. Common pattern." + }, + { + "pattern": "box_leak_or_static_hack", + "file": "N/A", + "line": "N/A", + "code": "N/A", + "is_bug": false, + "note": "No Box::leak or 'static lifetime hacks found in production code. All 'static usages are legitimate (enum as_str returns, function return types, JSON schema URLs)." + } +] diff --git a/bug_report.json b/bug_report.json new file mode 100644 index 00000000..1e336e00 --- /dev/null +++ b/bug_report.json @@ -0,0 +1,56 @@ +[ + { + "title": "[Kiro] Typo 'streening' instead of 'steering' in cleanup glob patterns", + "file": "sdk/src/domain/output_plans/kiro_output_plan.rs:39", + "severity": "high", + "description": "The kiro_output_plan uses '.kiro/streening' in all cleanup glob patterns (lines 39-40, 44-45, 51) instead of the correct '.kiro/steering'. All Kiro documentation (including the repo's own .claude/skills/ai-agents/kiro.md line 28) says the directory is '.kiro/steering/*.md'. This means cleanup targets a non-existent directory, and actual '.kiro/steering/' files will never be cleaned up. The test at line 190 also asserts on the wrong path, masking this bug." + }, + { + "title": "[Claude Code] build_rule_content does not filter null values from YAML metadata", + "file": "sdk/src/domain/output_plans/claude_code_output_plan.rs:242", + "severity": "medium", + "description": "In claude_code_output_plan.rs, build_rule_content (line 242) only filters empty arrays: 'metadata.retain(|_, v| !(v.is_array() && v.as_array().map(|a| a.is_empty()).unwrap_or(false)))'. But build_agent_content (line 266), build_command_content (line 296), and build_skill_content (line 324) also filter null values with '!(v.is_null() || ...)'. This inconsistency means null YAML front matter fields on rules will serialize as 'key: null' in generated YAML, potentially confusing Claude Code's parser." + }, + { + "title": "[Cursor/Warp/Windsurf] Child memory prompts dropped when agents_registered=true", + "file": "sdk/src/domain/output_plans/cursor_output_plan.rs:47", + "severity": "medium", + "description": "In cursor_output_plan.rs (lines 47-63), warp_output_plan.rs (lines 49-65), and windsurf_output_plan.rs (lines 46-62), when agents_registered is true, only global memory content is emitted per project. Project root prompts AND child memory prompts are completely skipped. This differs from the non-registered path where both root and child prompts are included. Projects with child directories (e.g., commands/, docs/) lose their subdirectory-level prompt files when AgentsOutputAdaptor is active." + }, + { + "title": "[Droid] build_markdown_with_front_matter omits YAML list indentation fix", + "file": "sdk/src/domain/output_plans/droid_output_plan.rs:611", + "severity": "medium", + "description": "The droid_output_plan uses build_markdown_with_front_matter (line 594) which calls serde_yml::to_string and wraps with '---' delimiters, but does NOT call indent_yaml_list_items. Other output plans (claude_code:347, codex:282, opencode:317) all indent list items after serde_yml serialization because serde_yml renders flat sequences (e.g., 'keywords:\\n- foo'). Droid output will have unindented list items, producing non-standard YAML front matter." + }, + { + "title": "[MCP] list_prompts tool schema declares distStatus but handler ignores it", + "file": "mcp/src/main.rs:90", + "severity": "medium", + "description": "The MCP tool definition for list_prompts (line 90) declares 'distStatus' as an input parameter in the JSON schema, but handle_list_prompts (lines 166-186) never extracts this parameter. The ListPromptsOptions struct (sdk/src/services/prompt_service.rs:66-75) has no dist_status field. Clients specifying distStatus filters will have them silently ignored." + }, + { + "title": "[MCP] apply_prompt_translation schema says enContent is optional but handler requires it", + "file": "mcp/src/main.rs:121", + "severity": "medium", + "description": "The MCP tool schema for apply_prompt_translation (line 121) declares 'required: [\"promptId\"]', making enContent appear optional. But the handler (line 241) returns an error if enContent is missing: 'apply_prompt_translation requires enContent'. Additionally, distContent (line 135) is declared in the schema but never extracted or used by the handler." + }, + { + "title": "[Codex] force_yaml_values_quoted fragile key-value splitting on first ': '", + "file": "sdk/src/domain/output_plans/codex_output_plan.rs:315", + "severity": "low", + "description": "The force_yaml_values_quoted function (line 310) uses 'line.find(\": \")' to split key from value at the first occurrence of ': '. While this works for typical metadata keys (description, argument-hint), it would produce incorrect quoting for keys that contain ': ' in their name. Currently safe because all keys are simple alphanumeric/kebab-case strings, but the function lacks a comment documenting this assumption." + }, + { + "title": "[Shared] indent_yaml_list_items duplicated across 3 output plan files", + "file": "sdk/src/domain/output_plans/claude_code_output_plan.rs:354", + "severity": "low", + "description": "The indent_yaml_list_items function is identically duplicated in claude_code_output_plan.rs:354, codex_output_plan.rs:356, and opencode_output_plan.rs:389. This violates DRY and risks divergence if one copy is updated but others are missed. Should be extracted to a shared utility module." + }, + { + "title": "[Base Plans] EditorConfig files from multiple sources could produce duplicate output paths", + "file": "sdk/src/domain/base_output_plans.rs:260", + "severity": "low", + "description": "In build_readme_plugin_plan (line 260), the nested loop iterates over editor_config_files × concrete_projects. If context.editor_config_files contains multiple .editorconfig entries from different source directories, each project will get multiple output_file entries at the same path ('project_root/.editorconfig') with potentially different content, without deduplication." + } +] diff --git a/cli/local-tests/src/lib.rs b/cli/local-tests/src/lib.rs index 24e8b424..a23afe6f 100644 --- a/cli/local-tests/src/lib.rs +++ b/cli/local-tests/src/lib.rs @@ -218,12 +218,22 @@ impl LocalTestRunner { /// 检查项目级 .trae/steering/GLOBAL.md 是否存在。 pub fn trae_steering_file_exists(&self) -> bool { - self.cwd.join(".trae").join("steering").join("GLOBAL.md").is_file() + self + .cwd + .join(".trae") + .join("steering") + .join("GLOBAL.md") + .is_file() } /// 检查项目级 .trae-cn/user_rules/GLOBAL.md 是否存在。 pub fn trae_cn_file_exists(&self) -> bool { - self.cwd.join(".trae-cn").join("user_rules").join("GLOBAL.md").is_file() + self + .cwd + .join(".trae-cn") + .join("user_rules") + .join("GLOBAL.md") + .is_file() } /// 检查项目级 CLAUDE.md 是否存在。 diff --git a/cli/local-tests/tests/opencode_agent_mode_validation.rs b/cli/local-tests/tests/opencode_agent_mode_validation.rs index b0aa535f..00c08d11 100644 --- a/cli/local-tests/tests/opencode_agent_mode_validation.rs +++ b/cli/local-tests/tests/opencode_agent_mode_validation.rs @@ -62,9 +62,7 @@ fn extract_mode_from_agent_file(content: &str) -> Option { break; } } - if in_front_matter - && let Some(mode) = extract_mode_from_front_matter_line(line) - { + if in_front_matter && let Some(mode) = extract_mode_from_front_matter_line(line) { return Some(mode); } } diff --git a/cli/local-tests/tests/trae_smoke.rs b/cli/local-tests/tests/trae_smoke.rs index ef616bfb..a9caa464 100644 --- a/cli/local-tests/tests/trae_smoke.rs +++ b/cli/local-tests/tests/trae_smoke.rs @@ -51,14 +51,26 @@ fn local_trae_steering_idempotent() { first.assert_success("first tnmsc install"); assert!(runner.trae_steering_file_exists()); - let content_first = - fs::read_to_string(runner.cwd().join(".trae").join("steering").join("GLOBAL.md")).unwrap(); + let content_first = fs::read_to_string( + runner + .cwd() + .join(".trae") + .join("steering") + .join("GLOBAL.md"), + ) + .unwrap(); let second = runner.install(); second.assert_success("second tnmsc install"); - let content_second = - fs::read_to_string(runner.cwd().join(".trae").join("steering").join("GLOBAL.md")).unwrap(); + let content_second = fs::read_to_string( + runner + .cwd() + .join(".trae") + .join("steering") + .join("GLOBAL.md"), + ) + .unwrap(); assert_eq!( content_first, content_second, @@ -100,10 +112,17 @@ fn local_trae_cn_cleaned_for_compatibility() { assert!(runner.trae_steering_file_exists()); // Simulate old-style .trae-cn/ output (should be cleaned up) - let trae_cn_path = runner.cwd().join(".trae-cn").join("user_rules").join("GLOBAL.md"); + let trae_cn_path = runner + .cwd() + .join(".trae-cn") + .join("user_rules") + .join("GLOBAL.md"); fs::create_dir_all(trae_cn_path.parent().unwrap()).unwrap(); fs::write(&trae_cn_path, "# legacy\n").unwrap(); - assert!(runner.trae_cn_file_exists(), "fake .trae-cn should exist before clean"); + assert!( + runner.trae_cn_file_exists(), + "fake .trae-cn should exist before clean" + ); let clean = runner.clean(); clean.assert_success("tnmsc clean removes legacy .trae-cn"); diff --git a/sdk/src/domain/mod.rs b/sdk/src/domain/mod.rs index 1f9d9faa..535138a7 100644 --- a/sdk/src/domain/mod.rs +++ b/sdk/src/domain/mod.rs @@ -13,9 +13,9 @@ pub use cleanup::{ pub use config::{ConfigLoader, MergedConfigResult, PluginsConfig, UserConfigFile}; pub use output_context::OutputContext; pub use plugin_shared::{ - AIAgentIgnoreConfigFile, SlashCommandPrompt, GlobalMemoryPrompt, IDEKind, NamingCaseKind, - PluginKind, Project, ProjectIDEConfigFile, PromptKind, ReadmePrompt, RelativePath, RulePrompt, - RuleScope, SkillPrompt, SubAgentPrompt, Workspace, + AIAgentIgnoreConfigFile, GlobalMemoryPrompt, IDEKind, NamingCaseKind, PluginKind, Project, + ProjectIDEConfigFile, PromptKind, ReadmePrompt, RelativePath, RulePrompt, RuleScope, SkillPrompt, + SlashCommandPrompt, SubAgentPrompt, Workspace, }; #[cfg(test)] diff --git a/sdk/src/domain/output_context.rs b/sdk/src/domain/output_context.rs index 4e957666..17f8ed08 100644 --- a/sdk/src/domain/output_context.rs +++ b/sdk/src/domain/output_context.rs @@ -1,8 +1,8 @@ use serde::{Deserialize, Serialize}; use crate::domain::plugin_shared::{ - AIAgentIgnoreConfigFile, SlashCommandPrompt, GlobalMemoryPrompt, ProjectIDEConfigFile, - ReadmePrompt, RulePrompt, SkillPrompt, SubAgentPrompt, Workspace, + AIAgentIgnoreConfigFile, GlobalMemoryPrompt, ProjectIDEConfigFile, ReadmePrompt, RulePrompt, + SkillPrompt, SlashCommandPrompt, SubAgentPrompt, Workspace, }; #[derive(Debug, Clone, Default, Serialize, Deserialize)] diff --git a/sdk/src/domain/output_plans/claude_code_output_plan.rs b/sdk/src/domain/output_plans/claude_code_output_plan.rs index f4855fad..4a7d13a3 100644 --- a/sdk/src/domain/output_plans/claude_code_output_plan.rs +++ b/sdk/src/domain/output_plans/claude_code_output_plan.rs @@ -451,7 +451,10 @@ mod tests { length: 5, file_path_kind: crate::infra::path_types::FilePathKind::Relative, relative_path: "guide.mdx".to_string(), - dir: crate::infra::path_types::RelativePath::new("guide.mdx", "/workspace/aindex/skills/test"), + dir: crate::infra::path_types::RelativePath::new( + "guide.mdx", + "/workspace/aindex/skills/test", + ), raw_front_matter: None, markdown_ast: None, markdown_contents: None, diff --git a/sdk/src/domain/output_plans/droid_output_plan.rs b/sdk/src/domain/output_plans/droid_output_plan.rs index 05f7b54e..fbfd6004 100644 --- a/sdk/src/domain/output_plans/droid_output_plan.rs +++ b/sdk/src/domain/output_plans/droid_output_plan.rs @@ -9,7 +9,7 @@ use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTa use crate::domain::config; use crate::domain::output_context::OutputContext; use crate::domain::plugin_shared::{ - SlashCommandPrompt, Project, RelativePath, RuleScope, SkillPrompt, Workspace, + Project, RelativePath, RuleScope, SkillPrompt, SlashCommandPrompt, Workspace, }; const DROID_PLUGIN_NAME: &str = "DroidCLIOutputAdaptor"; @@ -623,9 +623,9 @@ mod tests { use super::*; use crate::domain::plugin_shared::{ - SlashCommandYAMLFrontMatter, FilePathKind, GlobalMemoryPrompt, ProjectChildrenMemoryPrompt, - ProjectRootMemoryPrompt, PromptKind, RootPath, SkillChildDoc, SkillResource, - SkillResourceEncoding, SkillYAMLFrontMatter, + FilePathKind, GlobalMemoryPrompt, ProjectChildrenMemoryPrompt, ProjectRootMemoryPrompt, + PromptKind, RootPath, SkillChildDoc, SkillResource, SkillResourceEncoding, + SkillYAMLFrontMatter, SlashCommandYAMLFrontMatter, }; fn create_relative_path(base_path: &str, path: &str) -> RelativePath { diff --git a/sdk/src/domain/output_plans/kiro_output_plan.rs b/sdk/src/domain/output_plans/kiro_output_plan.rs index 16081603..e2252a56 100644 --- a/sdk/src/domain/output_plans/kiro_output_plan.rs +++ b/sdk/src/domain/output_plans/kiro_output_plan.rs @@ -187,7 +187,10 @@ mod tests { .map(|d| d.path.as_str()) .collect(); - assert!(paths.contains(&"/tmp/workspace/.kiro/streening"), "expected workspace root glob"); + assert!( + paths.contains(&"/tmp/workspace/.kiro/streening"), + "expected workspace root glob" + ); assert!( paths.contains(&"/tmp/workspace/project-a/.kiro/specs/**/*"), "expected project glob" diff --git a/sdk/src/domain/output_plans/opencode_output_plan.rs b/sdk/src/domain/output_plans/opencode_output_plan.rs index 1d2264d7..921177b4 100644 --- a/sdk/src/domain/output_plans/opencode_output_plan.rs +++ b/sdk/src/domain/output_plans/opencode_output_plan.rs @@ -712,7 +712,10 @@ mod tests { length: 5, file_path_kind: crate::infra::path_types::FilePathKind::Relative, relative_path: "guide.mdx".to_string(), - dir: crate::infra::path_types::RelativePath::new("guide.mdx", "/workspace/aindex/skills/test"), + dir: crate::infra::path_types::RelativePath::new( + "guide.mdx", + "/workspace/aindex/skills/test", + ), raw_front_matter: None, markdown_ast: None, markdown_contents: None, diff --git a/sdk/src/domain/output_plans/trae_output_plan.rs b/sdk/src/domain/output_plans/trae_output_plan.rs index 09a837ce..7edfc554 100644 --- a/sdk/src/domain/output_plans/trae_output_plan.rs +++ b/sdk/src/domain/output_plans/trae_output_plan.rs @@ -267,11 +267,7 @@ mod tests { }; let plan = build_trae_output_plan(&context).unwrap(); - let output_paths: Vec<&str> = plan - .output_files - .iter() - .map(|f| f.path.as_str()) - .collect(); + let output_paths: Vec<&str> = plan.output_files.iter().map(|f| f.path.as_str()).collect(); assert!( output_paths.contains( @@ -286,9 +282,7 @@ mod tests { ); assert!( - !output_paths - .iter() - .any(|p| p.contains(".trae-cn")), + !output_paths.iter().any(|p| p.contains(".trae-cn")), "output must NOT include any .trae-cn path, got: {:?}", output_paths ); @@ -324,11 +318,7 @@ mod tests { }; let plan = build_trae_output_plan(&context).unwrap(); - let output_paths: Vec<&str> = plan - .output_files - .iter() - .map(|f| f.path.as_str()) - .collect(); + let output_paths: Vec<&str> = plan.output_files.iter().map(|f| f.path.as_str()).collect(); assert!( output_paths.contains( diff --git a/sdk/src/infra/deno_runtime.rs b/sdk/src/infra/deno_runtime.rs index f298a154..c6fb163f 100644 --- a/sdk/src/infra/deno_runtime.rs +++ b/sdk/src/infra/deno_runtime.rs @@ -249,7 +249,11 @@ fn allowed_environment(context: &serde_json::Value) -> BTreeMap .into_iter() .flatten() .filter_map(serde_json::Value::as_str) - .filter_map(|name| std::env::var(name).ok().map(|value| (name.to_string(), value))) + .filter_map(|name| { + std::env::var(name) + .ok() + .map(|value| (name.to_string(), value)) + }) .collect() } @@ -520,7 +524,9 @@ console.log(JSON.stringify({ let context = serde_json::json!({ "allowedEnv": ["TNMSD_ALLOWED_ENV_FOR_TEST", "TNMSD_MISSING_ENV_FOR_TEST"] }); - let result = runtime.execute_ts(&script_path, &context.to_string()).unwrap(); + let result = runtime + .execute_ts(&script_path, &context.to_string()) + .unwrap(); let parsed: serde_json::Value = serde_json::from_str(result.trim()).unwrap(); assert_eq!(parsed["allowed"], "visible-value"); diff --git a/sdk/src/policy/md_cleanup.rs b/sdk/src/policy/md_cleanup.rs index d0317d22..2609212e 100644 --- a/sdk/src/policy/md_cleanup.rs +++ b/sdk/src/policy/md_cleanup.rs @@ -76,9 +76,7 @@ fn process_markdown_file( return; } - if !dry_run - && let Err(err) = std::fs::write(file_path, &cleaned) - { + if !dry_run && let Err(err) = std::fs::write(file_path, &cleaned) { errors.push((file_path.to_string_lossy().into_owned(), err.to_string())); return; } diff --git a/sdk/src/repositories/command.rs b/sdk/src/repositories/command.rs index a88f54c1..aacf128d 100644 --- a/sdk/src/repositories/command.rs +++ b/sdk/src/repositories/command.rs @@ -3,7 +3,7 @@ use serde_json::Value; use crate::domain::config; use crate::domain::plugin_shared::{ - SlashCommandPrompt, SlashCommandYAMLFrontMatter, PromptKind, RelativePath, + PromptKind, RelativePath, SlashCommandPrompt, SlashCommandYAMLFrontMatter, }; use crate::repositories::localized_reader::read_flat_files; @@ -48,15 +48,9 @@ fn build_command_prompt( entry: &crate::repositories::localized_reader::FlatFileEntry, dir: &str, ) -> Result { - let compiled = entry - .compiled - .as_ref() - .ok_or_else(|| { - crate::CliError::ConfigError(format!( - "Missing compiled prompt: {}.mdx", - entry.name - )) - })?; + let compiled = entry.compiled.as_ref().ok_or_else(|| { + crate::CliError::ConfigError(format!("Missing compiled prompt: {}.mdx", entry.name)) + })?; let file_path = format!("{}/{}.mdx", dir, entry.name); validate_command_metadata(&compiled.metadata, &file_path) diff --git a/sdk/src/repositories/localized_reader.rs b/sdk/src/repositories/localized_reader.rs index 2402b7ba..0ea9da8f 100644 --- a/sdk/src/repositories/localized_reader.rs +++ b/sdk/src/repositories/localized_reader.rs @@ -145,11 +145,7 @@ mod tests { fs::write(rules_dir.join("alpha.en.src.mdx"), "en source").unwrap(); fs::write(rules_dir.join("alpha.mdx"), "compiled").unwrap(); - let entries = read_flat_files( - temp_dir.path().join("rules").to_str().unwrap(), - None, - ) - .unwrap(); + let entries = read_flat_files(temp_dir.path().join("rules").to_str().unwrap(), None).unwrap(); assert_eq!(entries.len(), 1); let entry = &entries[0]; diff --git a/sdk/src/repositories/project_prompt.rs b/sdk/src/repositories/project_prompt.rs index a337e389..3373831f 100644 --- a/sdk/src/repositories/project_prompt.rs +++ b/sdk/src/repositories/project_prompt.rs @@ -8,7 +8,9 @@ use crate::domain::plugin_shared::{ FilePathKind, Project, ProjectChildrenMemoryPrompt, ProjectRootMemoryPrompt, PromptKind, RelativePath, RootPath, Workspace, }; -use crate::repositories::prompt_artifact::{assert_no_residual_module_syntax, read_prompt_artifact}; +use crate::repositories::prompt_artifact::{ + assert_no_residual_module_syntax, read_prompt_artifact, +}; #[derive(Debug, Clone, Default, Deserialize)] #[serde(rename_all = "camelCase")] diff --git a/sdk/src/repositories/rule.rs b/sdk/src/repositories/rule.rs index 54d93051..79df1a6b 100644 --- a/sdk/src/repositories/rule.rs +++ b/sdk/src/repositories/rule.rs @@ -92,15 +92,9 @@ fn build_rule_prompt( entry: &crate::repositories::localized_reader::FlatFileEntry, dir: &str, ) -> Result { - let compiled = entry - .compiled - .as_ref() - .ok_or_else(|| { - crate::CliError::ConfigError(format!( - "Missing compiled prompt: {}.mdx", - entry.name - )) - })?; + let compiled = entry.compiled.as_ref().ok_or_else(|| { + crate::CliError::ConfigError(format!("Missing compiled prompt: {}.mdx", entry.name)) + })?; let file_path = format!("{}/{}.mdx", dir, entry.name); validate_rule_metadata(&compiled.metadata, &file_path).map_err(crate::CliError::ConfigError)?; diff --git a/sdk/src/repositories/series_conflict.rs b/sdk/src/repositories/series_conflict.rs index 735010b2..7b4971ef 100644 --- a/sdk/src/repositories/series_conflict.rs +++ b/sdk/src/repositories/series_conflict.rs @@ -119,7 +119,11 @@ mod tests { fs::write(dir.path().join("en/notes.md"), b"x").unwrap(); let result = detect_project_name_conflicts(dir.path(), &["zh", "en"], "Prefix"); - assert!(result.is_ok(), "files at the series root must be ignored, got {:?}", result); + assert!( + result.is_ok(), + "files at the series root must be ignored, got {:?}", + result + ); } #[test] @@ -129,6 +133,10 @@ mod tests { // No "en" directory at all. let result = detect_project_name_conflicts(dir.path(), &["zh", "en"], "Prefix"); - assert!(result.is_ok(), "missing series dir is not a conflict, got {:?}", result); + assert!( + result.is_ok(), + "missing series dir is not a conflict, got {:?}", + result + ); } } diff --git a/sdk/src/repositories/skill.rs b/sdk/src/repositories/skill.rs index 3e939de0..7fe30862 100644 --- a/sdk/src/repositories/skill.rs +++ b/sdk/src/repositories/skill.rs @@ -9,7 +9,9 @@ use crate::domain::plugin_shared::{ FilePathKind, McpServerConfig, PromptKind, RelativePath, SkillChildDoc, SkillMcpConfig, SkillPrompt, SkillResource, SkillResourceEncoding, SkillYAMLFrontMatter, }; -use crate::repositories::prompt_artifact::{assert_no_residual_module_syntax, read_prompt_artifact}; +use crate::repositories::prompt_artifact::{ + assert_no_residual_module_syntax, read_prompt_artifact, +}; #[derive(Debug, Clone, Default, Deserialize)] #[serde(rename_all = "camelCase")] diff --git a/sdk/src/repositories/subagent.rs b/sdk/src/repositories/subagent.rs index a4936158..444459a4 100644 --- a/sdk/src/repositories/subagent.rs +++ b/sdk/src/repositories/subagent.rs @@ -81,15 +81,9 @@ fn build_subagent_prompt( dir: &str, diagnostics: &mut Vec, ) -> Result { - let compiled = entry - .compiled - .as_ref() - .ok_or_else(|| { - crate::CliError::ConfigError(format!( - "Missing compiled prompt: {}.mdx", - entry.name - )) - })?; + let compiled = entry.compiled.as_ref().ok_or_else(|| { + crate::CliError::ConfigError(format!("Missing compiled prompt: {}.mdx", entry.name)) + })?; let file_path = format!("{}/{}.mdx", dir, entry.name); validate_subagent_metadata(&compiled.metadata, &file_path) diff --git a/sdk/src/services/clean_service.rs b/sdk/src/services/clean_service.rs index 11f78c3d..d8bbc433 100644 --- a/sdk/src/services/clean_service.rs +++ b/sdk/src/services/clean_service.rs @@ -95,8 +95,8 @@ pub fn clean(options: MemorySyncCommandOptions) -> Result>(); diff --git a/sdk/src/services/install_service.rs b/sdk/src/services/install_service.rs index 12c8d5ee..dce2eeac 100644 --- a/sdk/src/services/install_service.rs +++ b/sdk/src/services/install_service.rs @@ -406,7 +406,12 @@ fn discover_install_project_roots( .strip_prefix(workspace) .ok() .and_then(|relative| relative.components().next()) - .map(|component| workspace.join(component.as_os_str()).to_string_lossy().into_owned()) + .map(|component| { + workspace + .join(component.as_os_str()) + .to_string_lossy() + .into_owned() + }) }) .collect::>(); roots.sort(); diff --git a/xtask/src/main.rs b/xtask/src/main.rs index d60bb25e..03af4b60 100644 --- a/xtask/src/main.rs +++ b/xtask/src/main.rs @@ -174,7 +174,14 @@ fn main() -> Result<(), String> { println!("[xtask] Running fmt check..."); run_cargo(&["fmt", "--check"])?; println!("[xtask] Running clippy..."); - run_cargo(&["clippy", "--workspace", "--all-targets", "--", "-D", "warnings"])?; + run_cargo(&[ + "clippy", + "--workspace", + "--all-targets", + "--", + "-D", + "warnings", + ])?; println!("[xtask] Linting completed."); } Command::CheckType => { @@ -213,7 +220,14 @@ fn main() -> Result<(), String> { println!("[xtask] Running full check..."); run_cargo(&["fmt", "--check"])?; run_cargo(&["check", "--workspace", "--exclude", "tnmsg"])?; - run_cargo(&["clippy", "--workspace", "--all-targets", "--", "-D", "warnings"])?; + run_cargo(&[ + "clippy", + "--workspace", + "--all-targets", + "--", + "-D", + "warnings", + ])?; run_cargo(&[ "test", "--workspace", From 55f1433477ec5f4cac2982538b67230f09be777e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Thu, 30 Apr 2026 16:00:38 +0800 Subject: [PATCH 20/45] chore: remove accidentally committed temp files --- bug-pattern-report.json | 290 ---------------------------------------- bug_report.json | 56 -------- 2 files changed, 346 deletions(-) delete mode 100644 bug-pattern-report.json delete mode 100644 bug_report.json diff --git a/bug-pattern-report.json b/bug-pattern-report.json deleted file mode 100644 index 11884614..00000000 --- a/bug-pattern-report.json +++ /dev/null @@ -1,290 +0,0 @@ -[ - { - "pattern": "unwrap_in_production", - "file": "sdk/src/repositories/project_prompt.rs", - "line": 28, - "code": "regex_lite::Regex::new(r\"^\\s*(```|~~~)\").unwrap()", - "is_bug": false, - "note": "Static regex compilation; panics only on invalid regex which is a compile-time constant" - }, - { - "pattern": "unwrap_in_production", - "file": "sdk/src/repositories/project_prompt.rs", - "line": 30, - "code": "regex_lite::Regex::new(r\"^\\s*export\\s+default\\b\").unwrap()", - "is_bug": false, - "note": "Static regex; compile-time constant, safe" - }, - { - "pattern": "unwrap_in_production", - "file": "sdk/src/repositories/project_prompt.rs", - "line": 31, - "code": "regex_lite::Regex::new(r\"^\\s*export\\s+const\\b\").unwrap()", - "is_bug": false, - "note": "Static regex; compile-time constant, safe" - }, - { - "pattern": "unwrap_in_production", - "file": "sdk/src/repositories/project_prompt.rs", - "line": 32, - "code": "regex_lite::Regex::new(r\"^\\s*import\\b\").unwrap()", - "is_bug": false, - "note": "Static regex; compile-time constant, safe" - }, - { - "pattern": "unwrap_in_production", - "file": "sdk/src/repositories/project_prompt.rs", - "line": 302, - "code": "project.project_type.clone().unwrap()", - "is_bug": true, - "note": "PANIC RISK: Guards is_some() on line 301 but uses unwrap on clone instead of unwrap_or/expect with context. If logic changes, this panics without a meaningful message. Should use expect() or unwrap_or_default()." - }, - { - "pattern": "unwrap_in_production", - "file": "sdk/src/repositories/project_prompt.rs", - "line": 319, - "code": "matching_series.unwrap()", - "is_bug": true, - "note": "PANIC RISK: Guards is_none() on line 314 with continue, but unwrap has no error message. Should use expect() for better diagnostics if the guard ever changes." - }, - { - "pattern": "expect_in_production", - "file": "sdk/src/infra/logger/sink.rs", - "line": 113, - "code": ".expect(\"failed to spawn logger output worker\")", - "is_bug": false, - "note": "Thread spawn failure; acceptable - application cannot function without logger" - }, - { - "pattern": "allow_dead_code", - "file": "cli/src/cli.rs", - "line": 105, - "code": "#[allow(dead_code)] pub fn as_str(self) -> &'static str", - "is_bug": true, - "note": "Dead code: as_str() on ResolvedLogLevel enum is unused in production. Could be removed or used. Signals possible incomplete feature integration." - }, - { - "pattern": "allow_dead_code", - "file": "sdk/src/services/dry_run_service.rs", - "line": 17, - "code": "#[allow(dead_code)] struct PlannedOutputFile", - "is_bug": true, - "note": "Dead code: PlannedOutputFile struct fields (path, content, encoding) are never read in production. Incomplete dry-run output implementation." - }, - { - "pattern": "allow_dead_code", - "file": "sdk/src/infra/script_runtime.rs", - "line": 220, - "code": "#[allow(dead_code)] worker_path: Option", - "is_bug": true, - "note": "Dead code: ResolvePublicPathContext.worker_path is deserialized but never read. Unused field in production." - }, - { - "pattern": "allow_dead_code", - "file": "sdk/src/infra/script_runtime.rs", - "line": 222, - "code": "#[allow(dead_code)] timeout_ms: Option", - "is_bug": true, - "note": "Dead code: ResolvePublicPathContext.timeout_ms is deserialized but never read. Unused field in production." - }, - { - "pattern": "todo_comment", - "file": "gui/src-tauri/src/tray.rs", - "line": 60, - "code": "// TODO: Trigger pipeline execution via sidecar once the full IPC wiring is in place.", - "is_bug": true, - "note": "Incomplete feature: Tray 'install' action only shows/focuses window instead of triggering pipeline. Unfinished IPC wiring." - }, - { - "pattern": "println_in_production", - "file": "cli/src/bin/tnmsc-test-api.rs", - "line": 40, - "code": "println!(\"{output}\")", - "is_bug": false, - "note": "Test API binary, println acceptable for CLI output" - }, - { - "pattern": "println_in_production", - "file": "cli/src/bin/tnmsc-test-api.rs", - "line": 44, - "code": "eprintln!(\"Error: {error}\")", - "is_bug": false, - "note": "Test API binary, eprintln acceptable for error output" - }, - { - "pattern": "println_in_production", - "file": "cli/src/commands/help.rs", - "line": "4-26", - "code": "println!() (23 occurrences)", - "is_bug": false, - "note": "Help command output to stdout - println is appropriate for user-facing help text" - }, - { - "pattern": "println_in_production", - "file": "cli/src/commands/package.rs", - "line": 45, - "code": "println!(\"Hydrated {}\", path.display())", - "is_bug": true, - "note": "CLI progress output should use logger, not raw println. Inconsistent with rest of CLI." - }, - { - "pattern": "println_in_production", - "file": "cli/src/commands/package.rs", - "line": 50, - "code": "eprintln!(\"Error: {error}\")", - "is_bug": true, - "note": "Error output should use logger.error(), not raw eprintln. Inconsistent error handling." - }, - { - "pattern": "println_in_production", - "file": "cli/src/commands/pipeline.rs", - "line": 214, - "code": "println!(\"{line}\")", - "is_bug": false, - "note": "Pipeline output to stdout - intentional user-facing output" - }, - { - "pattern": "println_in_production", - "file": "cli/src/commands/pipeline.rs", - "line": 217, - "code": "eprintln!(\"{line}\")", - "is_bug": false, - "note": "Pipeline error output to stderr - intentional user-facing output" - }, - { - "pattern": "println_in_production", - "file": "cli/src/commands/version.rs", - "line": 4, - "code": "println!(\"{}\", env!(\"CARGO_PKG_VERSION\"))", - "is_bug": false, - "note": "Version output - println appropriate for CLI version command" - }, - { - "pattern": "println_in_production", - "file": "gui/src-tauri/src/lib.rs", - "line": 43, - "code": "eprintln!(\"error while running tauri application: {error}\")", - "is_bug": true, - "note": "Application startup error should use logger, not raw eprintln. Especially important for GUI where stderr may not be visible." - }, - { - "pattern": "println_in_production", - "file": "mcp/src/main.rs", - "line": 277, - "code": "eprintln!(\"JSON parse error: {}\", e)", - "is_bug": true, - "note": "MCP server error output uses eprintln instead of logger. MCP communicates via stdout, so eprintln goes to stderr which may confuse users." - }, - { - "pattern": "println_in_production", - "file": "mcp/src/commands/package.rs", - "line": 45, - "code": "println!(\"Hydrated {}\", path.display())", - "is_bug": true, - "note": "MCP package progress should use logger, not println. MCP uses stdout for JSON-RPC; println may corrupt protocol." - }, - { - "pattern": "println_in_production", - "file": "mcp/src/commands/package.rs", - "line": 50, - "code": "eprintln!(\"Error: {error}\")", - "is_bug": true, - "note": "MCP package error should use logger, not raw eprintln." - }, - { - "pattern": "println_in_production", - "file": "xtask/src/main.rs", - "line": "130-237", - "code": "println!() (22 occurrences for task progress)", - "is_bug": false, - "note": "xtask is a dev build tool, println for progress output is acceptable and standard practice" - }, - { - "pattern": "println_in_production", - "file": "sdk/build.rs", - "line": 2, - "code": "println!(\"cargo:rerun-if-changed=build.rs\")", - "is_bug": false, - "note": "Cargo build script requires println! for build directives - mandatory pattern" - }, - { - "pattern": "hardcoded_url", - "file": "mcp/src/main.rs", - "line": "84,100,114,130", - "code": "\"https://json-schema.org/draft/2020-12/schema\"", - "is_bug": false, - "note": "JSON Schema standard URL reference - constant, appropriate to hardcode" - }, - { - "pattern": "unsafe_block", - "file": "sdk/src/domain/output_plans/droid_output_plan.rs", - "line": "791-803", - "code": "unsafe { std::env::set_var(\"HOME\", ...) } (x3)", - "is_bug": false, - "note": "Test-only code (inside #[cfg(test)] module). Uses mutex-guarded env var mutation. Rust 2024 requires unsafe for set_var." - }, - { - "pattern": "unsafe_block", - "file": "sdk/src/domain/output_plans/gemini_output_plan.rs", - "line": "297-309", - "code": "unsafe { std::env::set_var(\"HOME\", ...) } (x3)", - "is_bug": false, - "note": "Test-only code. Same pattern as droid_output_plan." - }, - { - "pattern": "unsafe_block", - "file": "sdk/src/services/dry_run_service.rs", - "line": "255-265", - "code": "unsafe { std::env::set_var(...) } (x3)", - "is_bug": false, - "note": "Test-only code (inside #[cfg(test)] module)." - }, - { - "pattern": "unsafe_block", - "file": "sdk/src/services/clean_service.rs", - "line": "637-649", - "code": "unsafe { std::env::set_var(\"HOME\", ...) } (x3)", - "is_bug": false, - "note": "Test-only code. Same test helper pattern." - }, - { - "pattern": "unsafe_block", - "file": "sdk/src/infra/deno_runtime.rs", - "line": "378-398", - "code": "unsafe { std::env::remove_var/set_var(...) } (x4)", - "is_bug": false, - "note": "Test-only code (inside #[cfg(test)] module)." - }, - { - "pattern": "unsafe_block", - "file": "sdk/src/infra/logger/mod.rs", - "line": 237, - "code": "unsafe { std::env::remove_var(\"LOG_LEVEL\") }", - "is_bug": false, - "note": "Test-only code." - }, - { - "pattern": "clone_on_large_type", - "file": "sdk/src/domain/config/mod.rs", - "line": 924, - "code": "loaded.iter().map(|r| r.config.clone()).collect()", - "is_bug": false, - "note": "Clones Vec of UserConfigFile during config merging. Occurs once per pipeline run. Acceptable." - }, - { - "pattern": "clone_on_large_type", - "file": "sdk/src/domain/config/mod.rs", - "line": 970, - "code": "serde_json::from_value::(parsed.clone())", - "is_bug": false, - "note": "Clones serde_json::Value to avoid borrow. Common pattern." - }, - { - "pattern": "box_leak_or_static_hack", - "file": "N/A", - "line": "N/A", - "code": "N/A", - "is_bug": false, - "note": "No Box::leak or 'static lifetime hacks found in production code. All 'static usages are legitimate (enum as_str returns, function return types, JSON schema URLs)." - } -] diff --git a/bug_report.json b/bug_report.json deleted file mode 100644 index 1e336e00..00000000 --- a/bug_report.json +++ /dev/null @@ -1,56 +0,0 @@ -[ - { - "title": "[Kiro] Typo 'streening' instead of 'steering' in cleanup glob patterns", - "file": "sdk/src/domain/output_plans/kiro_output_plan.rs:39", - "severity": "high", - "description": "The kiro_output_plan uses '.kiro/streening' in all cleanup glob patterns (lines 39-40, 44-45, 51) instead of the correct '.kiro/steering'. All Kiro documentation (including the repo's own .claude/skills/ai-agents/kiro.md line 28) says the directory is '.kiro/steering/*.md'. This means cleanup targets a non-existent directory, and actual '.kiro/steering/' files will never be cleaned up. The test at line 190 also asserts on the wrong path, masking this bug." - }, - { - "title": "[Claude Code] build_rule_content does not filter null values from YAML metadata", - "file": "sdk/src/domain/output_plans/claude_code_output_plan.rs:242", - "severity": "medium", - "description": "In claude_code_output_plan.rs, build_rule_content (line 242) only filters empty arrays: 'metadata.retain(|_, v| !(v.is_array() && v.as_array().map(|a| a.is_empty()).unwrap_or(false)))'. But build_agent_content (line 266), build_command_content (line 296), and build_skill_content (line 324) also filter null values with '!(v.is_null() || ...)'. This inconsistency means null YAML front matter fields on rules will serialize as 'key: null' in generated YAML, potentially confusing Claude Code's parser." - }, - { - "title": "[Cursor/Warp/Windsurf] Child memory prompts dropped when agents_registered=true", - "file": "sdk/src/domain/output_plans/cursor_output_plan.rs:47", - "severity": "medium", - "description": "In cursor_output_plan.rs (lines 47-63), warp_output_plan.rs (lines 49-65), and windsurf_output_plan.rs (lines 46-62), when agents_registered is true, only global memory content is emitted per project. Project root prompts AND child memory prompts are completely skipped. This differs from the non-registered path where both root and child prompts are included. Projects with child directories (e.g., commands/, docs/) lose their subdirectory-level prompt files when AgentsOutputAdaptor is active." - }, - { - "title": "[Droid] build_markdown_with_front_matter omits YAML list indentation fix", - "file": "sdk/src/domain/output_plans/droid_output_plan.rs:611", - "severity": "medium", - "description": "The droid_output_plan uses build_markdown_with_front_matter (line 594) which calls serde_yml::to_string and wraps with '---' delimiters, but does NOT call indent_yaml_list_items. Other output plans (claude_code:347, codex:282, opencode:317) all indent list items after serde_yml serialization because serde_yml renders flat sequences (e.g., 'keywords:\\n- foo'). Droid output will have unindented list items, producing non-standard YAML front matter." - }, - { - "title": "[MCP] list_prompts tool schema declares distStatus but handler ignores it", - "file": "mcp/src/main.rs:90", - "severity": "medium", - "description": "The MCP tool definition for list_prompts (line 90) declares 'distStatus' as an input parameter in the JSON schema, but handle_list_prompts (lines 166-186) never extracts this parameter. The ListPromptsOptions struct (sdk/src/services/prompt_service.rs:66-75) has no dist_status field. Clients specifying distStatus filters will have them silently ignored." - }, - { - "title": "[MCP] apply_prompt_translation schema says enContent is optional but handler requires it", - "file": "mcp/src/main.rs:121", - "severity": "medium", - "description": "The MCP tool schema for apply_prompt_translation (line 121) declares 'required: [\"promptId\"]', making enContent appear optional. But the handler (line 241) returns an error if enContent is missing: 'apply_prompt_translation requires enContent'. Additionally, distContent (line 135) is declared in the schema but never extracted or used by the handler." - }, - { - "title": "[Codex] force_yaml_values_quoted fragile key-value splitting on first ': '", - "file": "sdk/src/domain/output_plans/codex_output_plan.rs:315", - "severity": "low", - "description": "The force_yaml_values_quoted function (line 310) uses 'line.find(\": \")' to split key from value at the first occurrence of ': '. While this works for typical metadata keys (description, argument-hint), it would produce incorrect quoting for keys that contain ': ' in their name. Currently safe because all keys are simple alphanumeric/kebab-case strings, but the function lacks a comment documenting this assumption." - }, - { - "title": "[Shared] indent_yaml_list_items duplicated across 3 output plan files", - "file": "sdk/src/domain/output_plans/claude_code_output_plan.rs:354", - "severity": "low", - "description": "The indent_yaml_list_items function is identically duplicated in claude_code_output_plan.rs:354, codex_output_plan.rs:356, and opencode_output_plan.rs:389. This violates DRY and risks divergence if one copy is updated but others are missed. Should be extracted to a shared utility module." - }, - { - "title": "[Base Plans] EditorConfig files from multiple sources could produce duplicate output paths", - "file": "sdk/src/domain/base_output_plans.rs:260", - "severity": "low", - "description": "In build_readme_plugin_plan (line 260), the nested loop iterates over editor_config_files × concrete_projects. If context.editor_config_files contains multiple .editorconfig entries from different source directories, each project will get multiple output_file entries at the same path ('project_root/.editorconfig') with potentially different content, without deduplication." - } -] From 57c4d2ef57869e0a93e7338fb801918c25bdb196 Mon Sep 17 00:00:00 2001 From: SAY-5 Date: Wed, 29 Apr 2026 14:44:59 -0700 Subject: [PATCH 21/45] perf(logger/sink): drop per-log String clone Closes #189. `send_output` cloned the formatted string for the channel send and kept the original around only for the rare sink-thread-dead fallback. On the hot path (sink alive), every `write_event` / `write_span_*` paid a `String::clone` for nothing. Move the `String` straight into the channel. `mpsc::SendError` already hands the unsent value back on the disconnect path, so we can pull it out via `if let Err(SendError(OutputCommand::Write { output, .. }))` and borrow it directly into `write_direct`. No allocation on the happy path; the cold path is unchanged. `cargo build --manifest-path sdk/Cargo.toml` clean. The 16 logger unit tests pass, including `test_thread_safety`. `cargo clippy --lib` shows no new warnings against `sink.rs` (the existing warnings on other modules are unchanged). --- sdk/src/infra/logger/sink.rs | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/sdk/src/infra/logger/sink.rs b/sdk/src/infra/logger/sink.rs index 5d5c5913..56839d4e 100644 --- a/sdk/src/infra/logger/sink.rs +++ b/sdk/src/infra/logger/sink.rs @@ -1,5 +1,5 @@ use std::io::{self, Write}; -use std::sync::mpsc::{self, Receiver, Sender}; +use std::sync::mpsc::{self, Receiver, SendError, Sender}; use std::sync::{LazyLock, Mutex}; use std::thread; @@ -81,14 +81,13 @@ pub fn flush() { // --------------------------------------------------------------------------- fn send_output(use_stderr: bool, output: String) { - if OUTPUT_SINK - .send(OutputCommand::Write { - use_stderr, - output: output.clone(), - }) - .is_err() + // Move the formatted string straight into the channel. On the rare + // sink-thread-dead path the channel hands it back via the SendError + // payload, so the fallback `write_direct` can borrow it without us + // paying a per-call `String::clone` on every log line (#189). + if let Err(SendError(OutputCommand::Write { output, .. })) = + OUTPUT_SINK.send(OutputCommand::Write { use_stderr, output }) { - // Fallback: write directly if sink thread is dead write_direct(use_stderr, &output); } } From d42ac74dbe2494355c3fc94bc5a13cfed5da6d43 Mon Sep 17 00:00:00 2001 From: SAY-5 Date: Wed, 29 Apr 2026 14:54:00 -0700 Subject: [PATCH 22/45] cleanup(localized_reader): drop unused `seen` HashSet MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #190. `read_flat_files` allocated a `HashSet` and threaded it as `&mut seen` through `scan_directory` recursion just to call `seen.insert(full_name.clone())` once per new entry. The set was never read for dedup — `entries.iter_mut().find(|e| e.name == full_name)` is the actual existence check on the line above (#191 tracks the O(n²) → HashMap follow-up; this PR is just the dead write). Removing the parameter: - Deletes a per-call `String::clone` (set insertion took ownership of a clone of the name we already used to construct the entry). - Drops the `HashSet` allocation on every call to `read_flat_files`. - Removes the `use std::collections::HashSet` import. - Simplifies `scan_directory`'s signature (4 args → 3). `cargo build` + `cargo test --lib repositories` clean (45/45 repository tests pass). `cargo clippy --lib` shows no new warnings on `localized_reader.rs`. --- sdk/src/repositories/localized_reader.rs | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/sdk/src/repositories/localized_reader.rs b/sdk/src/repositories/localized_reader.rs index 0ea9da8f..f2e4cc12 100644 --- a/sdk/src/repositories/localized_reader.rs +++ b/sdk/src/repositories/localized_reader.rs @@ -1,4 +1,4 @@ -use std::collections::{HashMap, HashSet}; +use std::collections::HashMap; use std::path::Path; use crate::repositories::prompt_artifact::{PromptArtifact, read_prompt_artifact}; @@ -16,7 +16,6 @@ pub fn read_flat_files( global_scope_json: Option<&str>, ) -> Result, crate::CliError> { let mut entries: Vec = Vec::new(); - let mut seen: HashSet = HashSet::new(); // #253 replaces linear name lookup with an index so adding localized // variants does not degenerate into an O(n²) walk over `entries`. let mut by_name: HashMap = HashMap::new(); @@ -26,7 +25,6 @@ pub fn read_flat_files( scan_directory( dir_path, dir_path, - &mut seen, &mut by_name, &mut entries, global_scope_json, @@ -39,7 +37,6 @@ pub fn read_flat_files( fn scan_directory( root: &Path, current: &Path, - seen: &mut HashSet, by_name: &mut HashMap, entries: &mut Vec, global_scope_json: Option<&str>, @@ -48,7 +45,7 @@ fn scan_directory( let entry = entry.map_err(crate::CliError::IoError)?; let path = entry.path(); if path.is_dir() { - scan_directory(root, &path, seen, by_name, entries, global_scope_json)?; + scan_directory(root, &path, by_name, entries, global_scope_json)?; continue; } let Some(file_name) = path.file_name().and_then(|s| s.to_str()) else { @@ -108,7 +105,6 @@ fn scan_directory( existing.compiled = Some(artifact); } } else { - seen.insert(full_name.clone()); by_name.insert(full_name.clone(), entries.len()); let mut e = FlatFileEntry { name: full_name, From d3fb82661f10c43abe3755b2c2d04524c93041ca Mon Sep 17 00:00:00 2001 From: SAY-5 Date: Wed, 29 Apr 2026 15:04:59 -0700 Subject: [PATCH 23/45] fix(path_blocking): close TOCTOU between exists() and symlink_metadata() MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #192. `remove_blocking_file` checked `path.exists()` and then called `symlink_metadata(path)` separately. Between those two syscalls another process could create, replace, or delete the entry — the classic TOCTOU shape. Practically, an entry that disappears between the two stats produces a `symlink_metadata` `NotFound` error that gets bubbled to the caller as `Err(...)`, when the previous `exists()` already reported it absent (we'd want `Ok(false)`). Drop the redundant `exists()` and treat `symlink_metadata`'s `NotFound` directly as the "nothing to remove" case. One stat, race-free. Test: new `remove_blocking_file_returns_false_for_missing_path` covers the path that `exists()` previously short-circuited; the two existing tests (`remove_blocking_file_deletes_file` / `_skips_directory`) still pass. (The two `finds_blocking_file_in_directory_path` / `resolve_blocking_for_file_target` test failures on macOS are pre-existing `/var` symlink-resolution issues, unrelated to this patch.) --- sdk/src/policy/path_blocking.rs | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/sdk/src/policy/path_blocking.rs b/sdk/src/policy/path_blocking.rs index 8036427b..4e9e1a1d 100644 --- a/sdk/src/policy/path_blocking.rs +++ b/sdk/src/policy/path_blocking.rs @@ -66,14 +66,16 @@ pub fn resolve_blocking_file_path(path: &str, target_kind: &str, error: &str) -> pub fn remove_blocking_file(blocking_path: &str) -> Result { let path = Path::new(blocking_path); - if !path.exists() { - return Ok(false); - } + // Single stat — `path.exists()` followed by `symlink_metadata` was a + // TOCTOU race where another process could create / replace the entry + // between the two syscalls. Use `symlink_metadata` directly and treat + // `NotFound` as the "nothing to remove" case (#192). match std::fs::symlink_metadata(path) { Ok(meta) if meta.is_dir() => Ok(false), Ok(_) => std::fs::remove_file(path) .map(|_| true) .map_err(|e| e.to_string()), + Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(false), Err(e) => Err(e.to_string()), } } @@ -138,4 +140,17 @@ mod tests { assert!(!remove_blocking_file(&blocking_dir.to_string_lossy()).unwrap()); assert!(blocking_dir.exists()); } + + // Regression for #192: a missing path must return Ok(false) instead of + // bubbling up the `NotFound` error from `symlink_metadata`. Pre-fix the + // function relied on `path.exists()` first; the single-stat path now + // has to translate NotFound itself. + #[test] + fn remove_blocking_file_returns_false_for_missing_path() { + let dir = tempdir().unwrap(); + let missing = dir.path().join("does-not-exist"); + + let result = remove_blocking_file(&missing.to_string_lossy()).unwrap(); + assert!(!result, "missing path should report nothing to remove"); + } } From b4dde5ec366c02fa032a0a1f2a9c3cf8acef8421 Mon Sep 17 00:00:00 2001 From: SAY-5 Date: Wed, 29 Apr 2026 15:23:18 -0700 Subject: [PATCH 24/45] fix(cleanup): log WalkDir entry errors instead of silently skipping Closes #200. `BatchedGlobPlanner::execute()` walked the cleanup roots with `WalkDir` and used `let Ok(entry) = entry else { continue; }` to discard any iteration errors. Permission-denied entries, broken symlinks, and races where a tree got partially deleted mid-walk were skipped without a single trace, so a cleanup running under insufficient privileges (or against a half-removed shadow tree) would quietly miss files and the operator had no way to correlate the missed delete with the underlying syscall failure. Replace the silent continue with a `match`: on `Err` emit a `debug` log via the existing structured logger with the offending path (when `WalkDir` records it) and the `io::Error` message, then fall through to `continue` so behaviour is unchanged for the happy path. Operators tailing the cleanup logs now see exactly which entries were skipped and why; tests' `walked_entries` accounting is unaffected since the increment still only fires on `Ok(entry)`. `cargo test --lib policy::cleanup` is green (31/31). --- sdk/src/policy/cleanup.rs | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/sdk/src/policy/cleanup.rs b/sdk/src/policy/cleanup.rs index 601ab2bd..3ec7963a 100644 --- a/sdk/src/policy/cleanup.rs +++ b/sdk/src/policy/cleanup.rs @@ -511,8 +511,28 @@ impl BatchedGlobPlanner { }); for entry in walker { - let Ok(entry) = entry else { - continue; + let entry = match entry { + Ok(e) => e, + Err(e) => { + // Pre-#200 these errors (permission denied, broken + // symlinks, ENOENT during walk) were silently dropped, so + // a cleanup that ran with insufficient privileges or + // across a half-deleted tree would skip files without + // surfacing why. Emit a debug log with the offending + // path + io::Error message so operators can correlate a + // missed delete with the underlying syscall failure + // without changing the "skip and continue" behaviour. + let path_text = e.path().map(|p| p.display().to_string()).unwrap_or_default(); + crate::debug!( + logger, + "cleanup native walkdir entry skipped", + json!({ + "path": path_text, + "error": e.to_string(), + }) + ); + continue; + } }; walked_entries += 1; From 57d352c291b74149a6a528e781f8e964cf9f4928 Mon Sep 17 00:00:00 2001 From: SAY-5 Date: Wed, 29 Apr 2026 15:36:27 -0700 Subject: [PATCH 25/45] fix(dependency_resolver): map JSON parse errors to InvalidInput variant MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #209. `topological_sort(input_json)` previously rolled JSON parse failures into `DependencyResolverError::MissingDependency` with `node_name: "invalid input: …"` and an empty `missing_dependency`. Downstream consumers matching on the serialised `kind` field — the discriminator declared by `#[serde(tag = "kind", rename_all = "camelCase")]` — would treat parse failures as graph problems instead of input-shape problems. Add an `InvalidInput { message: String }` variant and route the serde error there. The `kind` payload now reads `invalidInput`, which the JS-side wrapper (and any future consumer) can branch on cleanly without string-sniffing the `node_name`. The two existing variants and their behaviour are unchanged. Tests in `dependency_resolver.rs` gain `topological_sort_maps_invalid_json_to_invalid_input` (asserts the new variant + `kind: "invalidInput"` payload) and `topological_sort_handles_well_formed_input` (sanity that the happy path still produces a sorted list). 13/13 module tests pass. --- sdk/src/policy/dependency_resolver.rs | 37 ++++++++++++++++++++++++--- 1 file changed, 34 insertions(+), 3 deletions(-) diff --git a/sdk/src/policy/dependency_resolver.rs b/sdk/src/policy/dependency_resolver.rs index 2e955583..2b2f30b7 100644 --- a/sdk/src/policy/dependency_resolver.rs +++ b/sdk/src/policy/dependency_resolver.rs @@ -10,6 +10,14 @@ pub enum DependencyResolverError { CircularDependency { cycle_path: Vec, }, + /// The caller passed a string that didn't deserialise into a valid + /// `Vec`. Pre-#209 this was reported as + /// `MissingDependency { node_name: "invalid input: …", missing_dependency: "" }` + /// — which made downstream consumers matching on the serialised + /// `kind` field treat parse failures as graph problems. + InvalidInput { + message: String, + }, } #[derive(Debug, Clone, serde::Deserialize, serde::Serialize)] @@ -159,9 +167,8 @@ pub fn topological_sort_nodes( pub fn topological_sort(input_json: &str) -> Result { let nodes: Vec = - serde_json::from_str(input_json).map_err(|e| DependencyResolverError::MissingDependency { - node_name: format!("invalid input: {}", e), - missing_dependency: String::new(), + serde_json::from_str(input_json).map_err(|e| DependencyResolverError::InvalidInput { + message: e.to_string(), })?; topological_sort_nodes(&nodes).map(|sorted| serde_json::to_string(&sorted).unwrap()) @@ -301,4 +308,28 @@ mod tests { assert!(path.contains(&"c".to_string())); assert!(!path.contains(&"a".to_string())); } + + // Regression for #209: invalid input must surface as InvalidInput, not + // as a synthesised MissingDependency. The serialised payload's `kind` + // field is the consumer-facing discriminator. + #[test] + fn topological_sort_maps_invalid_json_to_invalid_input() { + let err = topological_sort("not json").unwrap_err(); + match &err { + DependencyResolverError::InvalidInput { message } => { + assert!(!message.is_empty(), "expected serde error message"); + } + _ => panic!("Expected InvalidInput, got {:?}", err), + } + + let payload = serde_json::to_value(&err).unwrap(); + assert_eq!(payload["kind"], "invalidInput"); + } + + #[test] + fn topological_sort_handles_well_formed_input() { + let result = topological_sort(r#"[{"name":"a","dependsOn":[]}]"#).unwrap(); + let sorted: Vec = serde_json::from_str(&result).unwrap(); + assert_eq!(sorted, vec!["a"]); + } } From 2a7e724227e8f747f1c3c4f2254b28c83375083e Mon Sep 17 00:00:00 2001 From: SAY-5 Date: Wed, 29 Apr 2026 15:46:22 -0700 Subject: [PATCH 26/45] fix(config): surface parent-mkdir failure before write_config attempt MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #188. `write_config` did `let _ = fs::create_dir_all(parent)` and discarded the error. If `mkdir -p` failed (perms, EROFS, ENOSPC), the subsequent `fs::write(path, …)` would fail too — but with a secondary "No such file or directory" error that didn't point at the actual cause. Operators saw `CONFIG_WRITE_FAILED` and assumed the destination was unwritable, when really the parent never existed because `create_dir_all` had been silently swallowed. Bind the result and emit a `CONFIG_PARENT_DIR_CREATE_FAILED` diagnostic on `Err`, then `return` so we don't pile a confusing `CONFIG_WRITE_FAILED` on top. Operators now see the real failing step with the parent path + io::Error message, and the suggestion points at "parent path is writable / not on read-only or full filesystem" rather than the generic destination check. The serialization and write paths are unchanged. Behaviour on the happy path (parent exists, mkdir succeeds, write succeeds) is identical. `cargo build --manifest-path sdk/Cargo.toml` clean. `cargo test --lib domain::config` is green (24/24). --- sdk/src/domain/config/mod.rs | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/sdk/src/domain/config/mod.rs b/sdk/src/domain/config/mod.rs index d956cd71..3064e078 100644 --- a/sdk/src/domain/config/mod.rs +++ b/sdk/src/domain/config/mod.rs @@ -994,8 +994,25 @@ pub fn load_user_config(cwd: &Path) -> Result { pub fn write_config(path: &Path, config: &UserConfigFile, logger: &Logger) { if let Some(parent) = path.parent() && !parent.exists() + && let Err(e) = fs::create_dir_all(parent) { - let _ = fs::create_dir_all(parent); + // Pre-#188 the result was discarded; the subsequent `fs::write` + // would then fail with a confusing "No such file or directory" + // when the real cause was a parent-creation problem (permissions, + // EROFS, ENOSPC, etc.). Surface it as its own diagnostic so the + // operator sees the actual failing step before the redundant + // CONFIG_WRITE_FAILED that follows. + logger.warn(diagnostic( + "CONFIG_PARENT_DIR_CREATE_FAILED", + "Failed to create the config file's parent directory", + line("The CLI tried to create the directory holding the config file but the syscall failed."), + Some(line( + "Check that the parent path is writable and not on a read-only or full filesystem.", + )), + None, + path_error_details(parent, &e.to_string()), + )); + return; } match serde_json::to_string_pretty(config) { From d8d765f9ea9a2bdd8dc97194791ce0b23e3aaf8f Mon Sep 17 00:00:00 2001 From: SAY-5 Date: Wed, 29 Apr 2026 15:55:31 -0700 Subject: [PATCH 27/45] refactor(project_prompt): replace is_some/is_none + unwrap with let-Some / match MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #196. Closes #197. Two adjacent unwrap-anti-patterns in `enhance_workspace_projects_with_aindex`: - L301-302: `if project.project_type.is_some() { vec![project.project_type.clone().unwrap()] }` → `match &project.project_type { Some(ptype) => vec![ptype.clone()], None => … }` Removes the redundant `is_some()` check, drops the `unwrap()`, and clones only the borrow we actually need (#196). - L307-319: `let matching_series = …find(…); if matching_series.is_none() { continue }; let series_name = matching_series.unwrap();` → `let Some(series_name) = …find(…) else { … continue }; …` Single binding, no redundant `is_none()` re-check, no `unwrap()` on the path that already proved `Some` (#197). Behaviour is identical; the change is purely an idiom cleanup. The existing `repositories::project_prompt` test coverage (rolled into the 45-test `repositories` suite) stays green. --- sdk/src/repositories/project_prompt.rs | 17 ++++++----------- 1 file changed, 6 insertions(+), 11 deletions(-) diff --git a/sdk/src/repositories/project_prompt.rs b/sdk/src/repositories/project_prompt.rs index 3373831f..b66370c6 100644 --- a/sdk/src/repositories/project_prompt.rs +++ b/sdk/src/repositories/project_prompt.rs @@ -265,25 +265,20 @@ pub fn collect_project_prompt(options_json: &str) -> Result = if project.project_type.is_some() { - vec![project.project_type.clone().unwrap()] - } else { - SERIES_NAMES.iter().map(|&s| s.to_string()).collect() + let series_configs: Vec = match &project.project_type { + Some(ptype) => vec![ptype.clone()], + None => SERIES_NAMES.iter().map(|&s| s.to_string()).collect(), }; - let matching_series = series_configs.iter().find(|series_name| { + let Some(series_name) = series_configs.iter().find(|series_name| { let project_path = config::resolve_workspace_aindex_source_series_dir(&workspace_dir_str, series_name) .join(project_name); project_path.is_dir() - }); - - if matching_series.is_none() { + }) else { enhanced_projects.push(project); continue; - } - - let series_name = matching_series.unwrap(); + }; let shadow_project_path = config::resolve_workspace_aindex_source_series_dir(&workspace_dir_str, series_name) .join(project_name); From fb1efbbe84c7c988655d329ea0ca283ea04791b1 Mon Sep 17 00:00:00 2001 From: SAY-5 Date: Wed, 29 Apr 2026 16:51:59 -0700 Subject: [PATCH 28/45] fix(git_discovery): collect all nested modules subtrees, not just the last Closes #211. `find_git_module_info_dirs::walk` tracked the nested \`modules\` dir in a single \`Option\`, so a directory listing that yielded more than one entry whose \`file_name()\` matched \`modules\` would keep only the last one walked. The case is unusual on case-sensitive filesystems but reachable on case-insensitive ones (macOS / NTFS) where \`modules\` and \`Modules\` can collide as dir entries from different writers, and a future relax of the equality match (icase, alt names) would silently start dropping subtrees. Replace the \`Option\` with a \`Vec\` and walk each collected nested-modules dir in turn. The \`return\` on the inner \`fs::read_dir\` error becomes a \`continue\` so a single unreadable nested dir no longer aborts the walk for sibling subtrees in the same parent. `cargo test --lib policy::git_discovery` is green (3/3, including \`test_find_git_module_info_dirs_finds_nested_submodules\`). --- sdk/src/policy/git_discovery.rs | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/sdk/src/policy/git_discovery.rs b/sdk/src/policy/git_discovery.rs index d0cb2bf4..ea8a5599 100644 --- a/sdk/src/policy/git_discovery.rs +++ b/sdk/src/policy/git_discovery.rs @@ -21,7 +21,15 @@ pub fn find_git_module_info_dirs(dot_git_dir: &Path) -> Vec { }; let mut has_info = false; - let mut nested_modules = None; + // Pre-#211 this was a single `Option`, so a directory + // listing that yielded more than one entry whose `file_name()` + // matched `modules` would only keep the last one walked. The case + // is unusual on case-sensitive filesystems but reachable on + // case-insensitive ones (macOS / NTFS) where `modules` and + // `Modules` can collide as dir entries from different writers, + // and a future relax of the equality match (icase, alt names) + // would silently start dropping subtrees. Collect them all. + let mut nested_modules_dirs: Vec = Vec::new(); for entry in entries.flatten() { let name = entry.file_name(); @@ -35,7 +43,7 @@ pub fn find_git_module_info_dirs(dot_git_dir: &Path) -> Vec { && let Ok(ft) = entry.file_type() && ft.is_dir() { - nested_modules = Some(entry.path()); + nested_modules_dirs.push(entry.path()); } } @@ -43,10 +51,10 @@ pub fn find_git_module_info_dirs(dot_git_dir: &Path) -> Vec { results.push(dir.join("info")); } - if let Some(nested) = nested_modules { + for nested in nested_modules_dirs { let sub_entries = match fs::read_dir(&nested) { Ok(e) => e, - Err(_) => return, + Err(_) => continue, }; for entry in sub_entries.flatten() { if let Ok(ft) = entry.file_type() From c7967f16549ac5218c1463f7a485d9197ac32d51 Mon Sep 17 00:00:00 2001 From: SAY-5 Date: Wed, 29 Apr 2026 20:28:26 -0700 Subject: [PATCH 29/45] fix(logger): bound flush() wait with timeout to prevent caller deadlock (#187) Signed-off-by: SAY-5 --- sdk/src/infra/logger/sink.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/sdk/src/infra/logger/sink.rs b/sdk/src/infra/logger/sink.rs index 56839d4e..57e3f502 100644 --- a/sdk/src/infra/logger/sink.rs +++ b/sdk/src/infra/logger/sink.rs @@ -66,13 +66,17 @@ pub fn clear_diagnostics() { } } +/// Bound the worker-drain wait so a wedged worker (deadlocked, sigstop'd, +/// blocked on a slow stdout pipe) can't hang process shutdown indefinitely. +const FLUSH_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(5); + pub fn flush() { let (ack_tx, ack_rx) = mpsc::channel(); if OUTPUT_SINK .send(OutputCommand::Flush { ack: ack_tx }) .is_ok() { - let _ = ack_rx.recv(); + let _ = ack_rx.recv_timeout(FLUSH_TIMEOUT); } } From 0e82c05f387551625e7ee8cb5424123ae90b3d04 Mon Sep 17 00:00:00 2001 From: SAY-5 Date: Wed, 29 Apr 2026 20:35:06 -0700 Subject: [PATCH 30/45] fix(mcp): route assemble-npm output to stderr to keep stdout protocol-safe (#225) Signed-off-by: SAY-5 --- mcp/src/commands/package.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/mcp/src/commands/package.rs b/mcp/src/commands/package.rs index 50bea94d..66aba2a9 100644 --- a/mcp/src/commands/package.rs +++ b/mcp/src/commands/package.rs @@ -41,8 +41,12 @@ const PACKAGE_TARGETS: &[PackageTarget] = &[ pub fn execute(args: &AssembleNpmArgs) -> ExitCode { match assemble_packages(args) { Ok(copied) => { + // Use stderr: this binary's primary mode is the MCP stdio server, + // and stdout is reserved for JSON-RPC framing. Routing all + // assemble-npm chatter to stderr keeps stdout protocol-safe even + // if the subcommand is ever invoked from a wrapped context. for path in copied { - println!("Hydrated {}", path.display()); + eprintln!("Hydrated {}", path.display()); } ExitCode::SUCCESS } From a42dee9463ff6eb9022e9b217832b7ba319c9b4d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Thu, 30 Apr 2026 16:15:51 +0800 Subject: [PATCH 31/45] style: cargo fmt fixes --- sdk/src/policy/cleanup.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/sdk/src/policy/cleanup.rs b/sdk/src/policy/cleanup.rs index 3ec7963a..3cc4a8d6 100644 --- a/sdk/src/policy/cleanup.rs +++ b/sdk/src/policy/cleanup.rs @@ -522,7 +522,10 @@ impl BatchedGlobPlanner { // path + io::Error message so operators can correlate a // missed delete with the underlying syscall failure // without changing the "skip and continue" behaviour. - let path_text = e.path().map(|p| p.display().to_string()).unwrap_or_default(); + let path_text = e + .path() + .map(|p| p.display().to_string()) + .unwrap_or_default(); crate::debug!( logger, "cleanup native walkdir entry skipped", From 642d012df1900ff55ab7fcb117aa0f0ef8d9cc58 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Thu, 30 Apr 2026 22:24:36 +0800 Subject: [PATCH 32/45] chore: remove integrate-tests directories and update references - Deleted cli/integrate-tests/ and mcp/integrate-tests/ - Removed workspace members from Cargo.toml - Updated CI workflow to remove integration test references and point packaging smoke to local-tests - Updated xtask to remove integration test excludes - Removed integrate-tests .gitignore entry - Updated AGENTS.md and CLAUDE.md in root, cli, and mcp to remove integration test docs - Regenerated Cargo.lock (removed testcontainers and related deps) --- .github/workflows/ci.yml | 6 +- .gitignore | 1 - Cargo.lock | 595 +----------- Cargo.toml | 2 - cli/integrate-tests/Cargo.toml | 15 - cli/integrate-tests/src/lib.rs | 915 ------------------ cli/integrate-tests/tests/clean_blackbox.rs | 100 -- cli/integrate-tests/tests/command_contract.rs | 28 - cli/integrate-tests/tests/install_smoke.rs | 99 -- cli/integrate-tests/tests/packaging_smoke.rs | 90 -- .../tests/project_config_ts.rs | 189 ---- cli/integrate-tests/tests/proxy_mapping.rs | 148 --- .../tests/proxy_runtime_locations.rs | 132 --- .../tests/public_dir_mapped_to_projects.rs | 219 ----- mcp/integrate-tests/Cargo.toml | 13 - mcp/integrate-tests/src/lib.rs | 516 ---------- mcp/integrate-tests/tests/packaging_smoke.rs | 80 -- xtask/src/main.rs | 8 - 18 files changed, 4 insertions(+), 3152 deletions(-) delete mode 100644 cli/integrate-tests/Cargo.toml delete mode 100644 cli/integrate-tests/src/lib.rs delete mode 100644 cli/integrate-tests/tests/clean_blackbox.rs delete mode 100644 cli/integrate-tests/tests/command_contract.rs delete mode 100644 cli/integrate-tests/tests/install_smoke.rs delete mode 100644 cli/integrate-tests/tests/packaging_smoke.rs delete mode 100644 cli/integrate-tests/tests/project_config_ts.rs delete mode 100644 cli/integrate-tests/tests/proxy_mapping.rs delete mode 100644 cli/integrate-tests/tests/proxy_runtime_locations.rs delete mode 100644 cli/integrate-tests/tests/public_dir_mapped_to_projects.rs delete mode 100644 mcp/integrate-tests/Cargo.toml delete mode 100644 mcp/integrate-tests/src/lib.rs delete mode 100644 mcp/integrate-tests/tests/packaging_smoke.rs diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6b30e439..05717754 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -74,7 +74,7 @@ jobs: run: cargo run -p xtask -- build - name: Rust unit tests - run: cargo test --workspace --exclude tnmsg --exclude tnmsc-integrate-tests --exclude tnmsc-local-tests --exclude tnmsm-integrate-tests --lib --bins + run: cargo test --workspace --exclude tnmsg --exclude tnmsc-local-tests --lib --bins packaging-smoke: if: github.event_name != 'pull_request' || github.event.pull_request.draft == false @@ -93,10 +93,10 @@ jobs: run: cargo build --release -p tnmsc -p tnmsm - name: CLI packaging smoke - run: cargo test -p tnmsc-integrate-tests packaging_smoke_covers_release_binary_and_global_install -- --exact --nocapture + run: cargo test -p tnmsc-local-tests packaging_smoke_covers_release_binary_and_global_install -- --exact --nocapture - name: MCP packaging smoke - run: cargo test -p tnmsm-integrate-tests packaging_smoke_covers_release_binary_and_global_install -- --exact --nocapture + run: cargo test -p tnmsm-local-tests packaging_smoke_covers_release_binary_and_global_install -- --exact --nocapture gui-smoke: needs: changes diff --git a/.gitignore b/.gitignore index 81ca7e55..bb471c27 100644 --- a/.gitignore +++ b/.gitignore @@ -24,7 +24,6 @@ cli/npm/**/*.node cli/npm/**/bin/ cli/npm/**/tnmsc cli/npm/**/tnmsc.exe -cli/integrate-tests/.tmp/ libraries/**/dist/*.node **/target/ !**/Cargo.lock diff --git a/Cargo.lock b/Cargo.lock index 7abdfb95..92d42cf7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -150,55 +150,6 @@ dependencies = [ "syn 2.0.117", ] -[[package]] -name = "astral-tokio-tar" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c23f3af104b40a3430ccb90ed5f7bd877a8dc5c26fc92fde51a22b40890dcf9" -dependencies = [ - "filetime", - "futures-core", - "libc", - "portable-atomic", - "rustc-hash", - "tokio", - "tokio-stream", - "xattr", -] - -[[package]] -name = "async-stream" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" -dependencies = [ - "async-stream-impl", - "futures-core", - "pin-project-lite", -] - -[[package]] -name = "async-stream-impl" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.117", -] - -[[package]] -name = "async-trait" -version = "0.1.89" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.117", -] - [[package]] name = "atk" version = "0.18.2" @@ -256,49 +207,6 @@ dependencies = [ "fs_extra", ] -[[package]] -name = "axum" -version = "0.8.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31b698c5f9a010f6573133b09e0de5408834d0c82f8d7475a89fc1867a71cd90" -dependencies = [ - "axum-core", - "bytes", - "futures-util", - "http", - "http-body", - "http-body-util", - "itoa", - "matchit", - "memchr", - "mime", - "percent-encoding", - "pin-project-lite", - "serde_core", - "sync_wrapper", - "tower", - "tower-layer", - "tower-service", -] - -[[package]] -name = "axum-core" -version = "0.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08c78f31d7b1291f7ee735c1c6780ccde7785daae9a9206026862dab7d8792d1" -dependencies = [ - "bytes", - "futures-core", - "http", - "http-body", - "http-body-util", - "mime", - "pin-project-lite", - "sync_wrapper", - "tower-layer", - "tower-service", -] - [[package]] name = "az" version = "1.3.0" @@ -354,7 +262,7 @@ dependencies = [ "bitflags 2.11.0", "cexpr", "clang-sys", - "itertools 0.13.0", + "itertools", "log", "prettyplease", "proc-macro2", @@ -434,80 +342,6 @@ dependencies = [ "objc2", ] -[[package]] -name = "bollard" -version = "0.20.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee04c4c84f1f811b017f2fbb7dd8815c976e7ca98593de9c1e2afad0f636bff4" -dependencies = [ - "async-stream", - "base64 0.22.1", - "bitflags 2.11.0", - "bollard-buildkit-proto", - "bollard-stubs", - "bytes", - "futures-core", - "futures-util", - "hex", - "home", - "http", - "http-body-util", - "hyper", - "hyper-named-pipe", - "hyper-rustls", - "hyper-util", - "hyperlocal", - "log", - "num", - "pin-project-lite", - "rand 0.9.2", - "rustls", - "rustls-native-certs", - "rustls-pki-types", - "serde", - "serde_derive", - "serde_json", - "serde_urlencoded", - "thiserror 2.0.18", - "time", - "tokio", - "tokio-stream", - "tokio-util", - "tonic", - "tower-service", - "url", - "winapi", -] - -[[package]] -name = "bollard-buildkit-proto" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85a885520bf6249ab931a764ffdb87b0ceef48e6e7d807cfdb21b751e086e1ad" -dependencies = [ - "prost", - "prost-types", - "tonic", - "tonic-prost", - "ureq", -] - -[[package]] -name = "bollard-stubs" -version = "1.52.1-rc.29.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f0a8ca8799131c1837d1282c3f81f31e76ceb0ce426e04a7fe1ccee3287c066" -dependencies = [ - "base64 0.22.1", - "bollard-buildkit-proto", - "bytes", - "prost", - "serde", - "serde_json", - "serde_repr", - "time", -] - [[package]] name = "boxed_error" version = "0.2.3" @@ -755,17 +589,6 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" -[[package]] -name = "chacha20" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f8d983286843e49675a4b7a2d174efe136dc93a18d69130dd18198a6c167601" -dependencies = [ - "cfg-if", - "cpufeatures 0.3.0", - "rand_core 0.10.1", -] - [[package]] name = "chrono" version = "0.4.44" @@ -1436,17 +1259,6 @@ dependencies = [ "syn 2.0.117", ] -[[package]] -name = "docker_credential" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d89dfcba45b4afad7450a99b39e751590463e45c04728cf555d36bb66940de8" -dependencies = [ - "base64 0.21.7", - "serde", - "serde_json", -] - [[package]] name = "dpi" version = "0.1.2" @@ -1561,16 +1373,6 @@ dependencies = [ "windows-sys 0.61.2", ] -[[package]] -name = "etcetera" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de48cc4d1c1d97a20fd819def54b890cadde72ed3ad0c614822a0a433361be96" -dependencies = [ - "cfg-if", - "windows-sys 0.61.2", -] - [[package]] name = "fastrand" version = "2.3.0" @@ -1586,17 +1388,6 @@ dependencies = [ "simd-adler32", ] -[[package]] -name = "ferroid" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee93edf3c501f0035bbeffeccfed0b79e14c311f12195ec0e661e114a0f60da4" -dependencies = [ - "portable-atomic", - "rand 0.10.1", - "web-time", -] - [[package]] name = "field-offset" version = "0.3.6" @@ -1977,7 +1768,6 @@ dependencies = [ "cfg-if", "libc", "r-efi", - "rand_core 0.10.1", "wasip2", "wasip3", ] @@ -2152,25 +1942,6 @@ dependencies = [ "crc32fast", ] -[[package]] -name = "h2" -version = "0.4.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f44da3a8150a6703ed5d34e164b875fd14c2cdab9af1252a9a1020bde2bdc54" -dependencies = [ - "atomic-waker", - "bytes", - "fnv", - "futures-core", - "futures-sink", - "http", - "indexmap 2.13.0", - "slab", - "tokio", - "tokio-util", - "tracing", -] - [[package]] name = "hashbrown" version = "0.12.3" @@ -2300,12 +2071,6 @@ version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" -[[package]] -name = "httpdate" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" - [[package]] name = "hybrid-array" version = "0.4.8" @@ -2325,11 +2090,9 @@ dependencies = [ "bytes", "futures-channel", "futures-core", - "h2", "http", "http-body", "httparse", - "httpdate", "itoa", "pin-project-lite", "pin-utils", @@ -2338,21 +2101,6 @@ dependencies = [ "want", ] -[[package]] -name = "hyper-named-pipe" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73b7d8abf35697b81a825e386fc151e0d503e8cb5fcb93cc8669c376dfd6f278" -dependencies = [ - "hex", - "hyper", - "hyper-util", - "pin-project-lite", - "tokio", - "tower-service", - "winapi", -] - [[package]] name = "hyper-rustls" version = "0.27.7" @@ -2369,19 +2117,6 @@ dependencies = [ "tower-service", ] -[[package]] -name = "hyper-timeout" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" -dependencies = [ - "hyper", - "hyper-util", - "pin-project-lite", - "tokio", - "tower-service", -] - [[package]] name = "hyper-util" version = "0.1.20" @@ -2405,21 +2140,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "hyperlocal" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "986c5ce3b994526b3cd75578e62554abd09f0899d6206de48b3e96ab34ccc8c7" -dependencies = [ - "hex", - "http-body-util", - "hyper", - "hyper-util", - "pin-project-lite", - "tokio", - "tower-service", -] - [[package]] name = "iana-time-zone" version = "0.1.65" @@ -2724,15 +2444,6 @@ dependencies = [ "either", ] -[[package]] -name = "itertools" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" -dependencies = [ - "either", -] - [[package]] name = "itoa" version = "1.0.17" @@ -3039,12 +2750,6 @@ version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" -[[package]] -name = "matchit" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" - [[package]] name = "memchr" version = "2.8.0" @@ -3179,20 +2884,6 @@ dependencies = [ "minimal-lexical", ] -[[package]] -name = "num" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23" -dependencies = [ - "num-bigint", - "num-complex", - "num-integer", - "num-iter", - "num-rational", - "num-traits", -] - [[package]] name = "num-bigint" version = "0.4.6" @@ -3205,15 +2896,6 @@ dependencies = [ "serde", ] -[[package]] -name = "num-complex" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495" -dependencies = [ - "num-traits", -] - [[package]] name = "num-conv" version = "0.2.0" @@ -3229,28 +2911,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "num-iter" -version = "0.1.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" -dependencies = [ - "autocfg", - "num-integer", - "num-traits", -] - -[[package]] -name = "num-rational" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824" -dependencies = [ - "num-bigint", - "num-integer", - "num-traits", -] - [[package]] name = "num-traits" version = "0.2.19" @@ -3560,31 +3220,6 @@ dependencies = [ "windows-link 0.2.1", ] -[[package]] -name = "parse-display" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "914a1c2265c98e2446911282c6ac86d8524f495792c38c5bd884f80499c7538a" -dependencies = [ - "parse-display-derive", - "regex", - "regex-syntax", -] - -[[package]] -name = "parse-display-derive" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ae7800a4c974efd12df917266338e79a7a74415173caf7e70aa0a0707345281" -dependencies = [ - "proc-macro2", - "quote", - "regex", - "regex-syntax", - "structmeta", - "syn 2.0.117", -] - [[package]] name = "paste" version = "1.0.15" @@ -3801,12 +3436,6 @@ dependencies = [ "miniz_oxide", ] -[[package]] -name = "portable-atomic" -version = "1.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c33a9471896f1c69cecef8d20cbe2f7accd12527ce60845ff44c153bb2a21b49" - [[package]] name = "potential_utf" version = "0.1.4" @@ -3936,38 +3565,6 @@ dependencies = [ "unarray", ] -[[package]] -name = "prost" -version = "0.14.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2ea70524a2f82d518bce41317d0fae74151505651af45faf1ffbd6fd33f0568" -dependencies = [ - "bytes", - "prost-derive", -] - -[[package]] -name = "prost-derive" -version = "0.14.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "27c6023962132f4b30eb4c172c91ce92d933da334c59c23cddee82358ddafb0b" -dependencies = [ - "anyhow", - "itertools 0.14.0", - "proc-macro2", - "quote", - "syn 2.0.117", -] - -[[package]] -name = "prost-types" -version = "0.14.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8991c4cbdb8bc5b11f0b074ffe286c30e523de90fee5ba8132f1399f23cb3dd7" -dependencies = [ - "prost", -] - [[package]] name = "psm" version = "0.1.30" @@ -4105,17 +3702,6 @@ dependencies = [ "rand_core 0.9.5", ] -[[package]] -name = "rand" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2e8e8bcc7961af1fdac401278c6a831614941f6164ee3bf4ce61b7edb162207" -dependencies = [ - "chacha20", - "getrandom 0.4.1", - "rand_core 0.10.1", -] - [[package]] name = "rand_chacha" version = "0.2.2" @@ -4173,12 +3759,6 @@ dependencies = [ "getrandom 0.3.4", ] -[[package]] -name = "rand_core" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "63b8176103e19a2643978565ca18b50549f6101881c443590420e4dc998a3c69" - [[package]] name = "rand_hc" version = "0.2.0" @@ -4409,7 +3989,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7160e3e10bf4535308537f3c4e1641468cd0e485175d6163087c0393c7d46643" dependencies = [ "aws-lc-rs", - "log", "once_cell", "ring", "rustls-pki-types", @@ -4756,18 +4335,6 @@ dependencies = [ "serde_core", ] -[[package]] -name = "serde_urlencoded" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" -dependencies = [ - "form_urlencoded", - "itoa", - "ryu", - "serde", -] - [[package]] name = "serde_v8" version = "0.308.0" @@ -5118,29 +4685,6 @@ version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" -[[package]] -name = "structmeta" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e1575d8d40908d70f6fd05537266b90ae71b15dbbe7a8b7dffa2b759306d329" -dependencies = [ - "proc-macro2", - "quote", - "structmeta-derive", - "syn 2.0.117", -] - -[[package]] -name = "structmeta-derive" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "152a0b65a590ff6c3da95cabe2353ee04e6167c896b28e3b14478c2636c922fc" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.117", -] - [[package]] name = "strum" version = "0.27.2" @@ -6072,37 +5616,6 @@ dependencies = [ "winapi-util", ] -[[package]] -name = "testcontainers" -version = "0.27.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfd5785b5483672915ed5fe3cddf9f546802779fc1eceff0a6fb7321fac81c1e" -dependencies = [ - "astral-tokio-tar", - "async-trait", - "bollard", - "bytes", - "docker_credential", - "either", - "etcetera", - "ferroid", - "futures", - "http", - "itertools 0.14.0", - "log", - "memchr", - "parse-display", - "pin-project-lite", - "serde", - "serde_json", - "serde_with", - "thiserror 2.0.18", - "tokio", - "tokio-stream", - "tokio-util", - "url", -] - [[package]] name = "text_lines" version = "0.6.0" @@ -6230,16 +5743,6 @@ dependencies = [ "tnmsd", ] -[[package]] -name = "tnmsc-integrate-tests" -version = "2026.10425.10602" -dependencies = [ - "flate2", - "serde_json", - "tar", - "testcontainers", -] - [[package]] name = "tnmsc-local-tests" version = "2026.10425.10602" @@ -6301,14 +5804,6 @@ dependencies = [ "tnmsd", ] -[[package]] -name = "tnmsm-integrate-tests" -version = "2026.10425.10602" -dependencies = [ - "serde_json", - "testcontainers", -] - [[package]] name = "tokio" version = "1.50.0" @@ -6347,17 +5842,6 @@ dependencies = [ "tokio", ] -[[package]] -name = "tokio-stream" -version = "0.1.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32da49809aab5c3bc678af03902d4ccddea2a87d028d86392a4b1560c6906c70" -dependencies = [ - "futures-core", - "pin-project-lite", - "tokio", -] - [[package]] name = "tokio-util" version = "0.7.18" @@ -6467,46 +5951,6 @@ version = "1.0.6+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab16f14aed21ee8bfd8ec22513f7287cd4a91aa92e44edfe2c17ddd004e92607" -[[package]] -name = "tonic" -version = "0.14.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fec7c61a0695dc1887c1b53952990f3ad2e3a31453e1f49f10e75424943a93ec" -dependencies = [ - "async-trait", - "axum", - "base64 0.22.1", - "bytes", - "h2", - "http", - "http-body", - "http-body-util", - "hyper", - "hyper-timeout", - "hyper-util", - "percent-encoding", - "pin-project", - "socket2", - "sync_wrapper", - "tokio", - "tokio-stream", - "tower", - "tower-layer", - "tower-service", - "tracing", -] - -[[package]] -name = "tonic-prost" -version = "0.14.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a55376a0bbaa4975a3f10d009ad763d8f4108f067c7c2e74f3001fb49778d309" -dependencies = [ - "bytes", - "prost", - "tonic", -] - [[package]] name = "tower" version = "0.5.3" @@ -6515,15 +5959,11 @@ checksum = "ebe5ef63511595f1344e2d5cfa636d973292adc0eec1f0ad45fae9f0851ab1d4" dependencies = [ "futures-core", "futures-util", - "indexmap 2.13.0", "pin-project-lite", - "slab", "sync_wrapper", "tokio", - "tokio-util", "tower-layer", "tower-service", - "tracing", ] [[package]] @@ -6732,33 +6172,6 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" -[[package]] -name = "ureq" -version = "3.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dea7109cdcd5864d4eeb1b58a1648dc9bf520360d7af16ec26d0a9354bafcfc0" -dependencies = [ - "base64 0.22.1", - "log", - "percent-encoding", - "rustls", - "rustls-pki-types", - "ureq-proto", - "utf8-zero", -] - -[[package]] -name = "ureq-proto" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e994ba84b0bd1b1b0cf92878b7ef898a5c1760108fe7b6010327e274917a808c" -dependencies = [ - "base64 0.22.1", - "http", - "httparse", - "log", -] - [[package]] name = "url" version = "2.5.8" @@ -6790,12 +6203,6 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" -[[package]] -name = "utf8-zero" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8c0a043c9540bae7c578c88f91dda8bd82e59ae27c21baca69c8b191aaf5a6e" - [[package]] name = "utf8_iter" version = "1.0.4" diff --git a/Cargo.toml b/Cargo.toml index 43b6f84d..1a89cc99 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -21,10 +21,8 @@ members = [ "xtask", "sdk", "cli", - "cli/integrate-tests", "cli/local-tests", "mcp", - "mcp/integrate-tests", "gui/src-tauri", ] diff --git a/cli/integrate-tests/Cargo.toml b/cli/integrate-tests/Cargo.toml deleted file mode 100644 index 93c24302..00000000 --- a/cli/integrate-tests/Cargo.toml +++ /dev/null @@ -1,15 +0,0 @@ -[package] -name = "tnmsc-integrate-tests" -publish = false -version.workspace = true -edition.workspace = true -rust-version.workspace = true -license.workspace = true -authors.workspace = true -repository.workspace = true - -[dependencies] -serde_json = { workspace = true } -tar = "0.4" -flate2 = "1" -testcontainers = { version = "0.27.3", features = ["blocking"] } diff --git a/cli/integrate-tests/src/lib.rs b/cli/integrate-tests/src/lib.rs deleted file mode 100644 index e080c8bb..00000000 --- a/cli/integrate-tests/src/lib.rs +++ /dev/null @@ -1,915 +0,0 @@ -#![allow(dead_code)] - -use std::ffi::OsStr; -use std::fs; -use std::path::{Path, PathBuf}; -use std::process::{Command, Output}; -use std::sync::OnceLock; -use std::time::{SystemTime, UNIX_EPOCH}; - -use testcontainers::core::{ExecCommand, WaitFor}; -use testcontainers::runners::SyncRunner; -use testcontainers::{Container, GenericImage, ImageExt}; - -pub const DOCKER_IMAGE_NAME: &str = "node"; -pub const DOCKER_IMAGE_TAG: &str = "22-trixie"; -const EXIT_MARKER: &str = "__TNMSC_EXIT_CODE__="; - -pub const EXPECTED_SUBCOMMANDS: &[&str] = &["install", "dry-run", "clean", "version", "help"]; -pub const PACKAGED_PLATFORM_PACKAGE: &str = "@truenine/memory-sync-cli-linux-x64-gnu"; - -static PNPM_VERSION: OnceLock = OnceLock::new(); -static RELEASE_BINARY_BUILT: OnceLock<()> = OnceLock::new(); -static RELEASE_TEST_API_BINARY_BUILT: OnceLock<()> = OnceLock::new(); -static PACKED_CLI_ARTIFACTS: OnceLock = OnceLock::new(); - -pub struct CommandResult { - pub status: i32, - pub stdout: String, - pub stderr: String, -} - -impl CommandResult { - pub fn assert_success(&self, context: &str) { - assert_eq!( - self.status, 0, - "{context} should succeed.\nexit: {}\nstdout:\n{}\nstderr:\n{}", - self.status, self.stdout, self.stderr - ); - } - - pub fn assert_failure(&self, context: &str) { - assert_ne!( - self.status, 0, - "{context} should fail.\nstdout:\n{}\nstderr:\n{}", - self.stdout, self.stderr - ); - } -} - -pub struct TestDir { - path: PathBuf, -} - -impl TestDir { - pub fn new(prefix: &str) -> Self { - let timestamp = SystemTime::now() - .duration_since(UNIX_EPOCH) - .expect("system time should be after UNIX_EPOCH") - .as_nanos(); - - let base_dir = integration_tmp_root(); - fs::create_dir_all(&base_dir) - .unwrap_or_else(|error| panic!("failed to create {}: {error}", base_dir.display())); - - let path = base_dir.join(format!("{prefix}-{}-{timestamp}", std::process::id())); - fs::create_dir_all(&path).unwrap_or_else(|error| { - panic!( - "failed to create temp directory {}: {error}", - path.display() - ) - }); - - Self { path } - } - - pub fn path(&self) -> &Path { - &self.path - } -} - -impl Drop for TestDir { - fn drop(&mut self) { - let _ = fs::remove_dir_all(&self.path); - } -} - -pub struct StagedPackageRoot { - _temp_dir: TestDir, - pub package_root: PathBuf, - pub linux_binary: PathBuf, - pub test_api_binary: PathBuf, -} - -pub struct PackedArtifacts { - _temp_dir: TestDir, - pub cli_tarball: PathBuf, - pub linux_tarball: PathBuf, - pub test_api_binary: PathBuf, -} - -pub struct TestContainer { - container: Container, -} - -impl Drop for TestContainer { - fn drop(&mut self) { - let id = self.container.id(); - eprintln!("stopping and removing testcontainer: {id}"); - let _ = self.container.stop(); - } -} - -impl TestContainer { - pub fn start(artifacts: &PackedArtifacts) -> Self { - assert!( - artifacts.cli_tarball.is_file(), - "CLI tarball does not exist: {}", - artifacts.cli_tarball.display() - ); - assert!( - artifacts.linux_tarball.is_file(), - "Linux tarball does not exist: {}", - artifacts.linux_tarball.display() - ); - assert!( - artifacts.test_api_binary.is_file(), - "Test API binary does not exist: {}", - artifacts.test_api_binary.display() - ); - - let image = GenericImage::new(DOCKER_IMAGE_NAME, DOCKER_IMAGE_TAG) - .with_wait_for(WaitFor::seconds(1)) - .with_cmd(vec![ - "sh".to_string(), - "-lc".to_string(), - "while true; do sleep 3600; done".to_string(), - ]) - .with_copy_to("/artifacts/cli.tgz", artifacts.cli_tarball.as_path()) - .with_copy_to( - "/artifacts/linux-x64-gnu.tgz", - artifacts.linux_tarball.as_path(), - ) - .with_copy_to( - "/test-bin/tnmsc-test-api", - artifacts.test_api_binary.as_path(), - ); - - eprintln!( - "[tnmsc-integrate-tests] starting testcontainer ({DOCKER_IMAGE_NAME}:{DOCKER_IMAGE_TAG})..." - ); - let start = std::time::Instant::now(); - let container = image - .start() - .unwrap_or_else(|error| panic!("failed to start testcontainer: {error}")); - eprintln!( - "[tnmsc-integrate-tests] testcontainer started in {:.2}s", - start.elapsed().as_secs_f64() - ); - - Self { container } - } - - pub fn exec_with_retries_and_timeout( - &self, - command: &str, - max_attempts: u32, - delay_ms: u64, - timeout_secs: u64, - ) -> CommandResult { - let mut last_result: Option = None; - for attempt in 1..=max_attempts { - let result = self.exec_with_timeout(command, timeout_secs); - if result.status == 0 { - return result; - } - last_result = Some(result); - if attempt < max_attempts { - std::thread::sleep(std::time::Duration::from_millis(delay_ms)); - } - } - last_result.expect("should have at least one attempt") - } - - pub fn exec_with_timeout(&self, command: &str, timeout_secs: u64) -> CommandResult { - let script = shell_script(command); - let mut exec_result = self - .container - .exec(ExecCommand::new(vec!["sh", "-lc", &script])) - .unwrap_or_else(|error| panic!("failed to exec in testcontainer: {error}")); - - let deadline = std::time::Instant::now() + std::time::Duration::from_secs(timeout_secs); - loop { - if std::time::Instant::now() > deadline { - panic!("command timed out after {timeout_secs}s: {command}"); - } - - if let Ok(Some(_code)) = exec_result.exit_code() { - break; - } - - std::thread::sleep(std::time::Duration::from_millis(100)); - } - - let fallback_status = exec_result.exit_code().ok().flatten().unwrap_or(0) as i32; - let stdout = exec_result - .stdout_to_vec() - .unwrap_or_else(|error| panic!("failed to read exec stdout: {error}")); - let stderr = exec_result - .stderr_to_vec() - .unwrap_or_else(|error| panic!("failed to read exec stderr: {error}")); - let stderr = String::from_utf8_lossy(&stderr).into_owned(); - let (status, stderr) = extract_exit_code(&stderr).unwrap_or((fallback_status, stderr)); - - CommandResult { - status, - stdout: String::from_utf8_lossy(&stdout).into_owned(), - stderr, - } - } - - pub fn exec_with_retries( - &self, - command: &str, - max_attempts: u32, - delay_ms: u64, - ) -> CommandResult { - let mut last_result: Option = None; - for attempt in 1..=max_attempts { - let result = self.exec(command); - if result.status == 0 { - return result; - } - last_result = Some(result); - if attempt < max_attempts { - std::thread::sleep(std::time::Duration::from_millis(delay_ms)); - } - } - last_result.expect("should have at least one attempt") - } - - pub fn exec(&self, command: &str) -> CommandResult { - let script = shell_script(command); - let mut exec_result = self - .container - .exec(ExecCommand::new(vec!["sh", "-lc", &script])) - .unwrap_or_else(|error| panic!("failed to exec in testcontainer: {error}")); - - let fallback_status = exec_result - .exit_code() - .unwrap_or_else(|error| panic!("failed to read exec exit code: {error}")) - .unwrap_or(0) as i32; - let stdout = exec_result - .stdout_to_vec() - .unwrap_or_else(|error| panic!("failed to read exec stdout: {error}")); - let stderr = exec_result - .stderr_to_vec() - .unwrap_or_else(|error| panic!("failed to read exec stderr: {error}")); - let stderr = String::from_utf8_lossy(&stderr).into_owned(); - let (status, stderr) = extract_exit_code(&stderr).unwrap_or((fallback_status, stderr)); - - CommandResult { - status, - stdout: String::from_utf8_lossy(&stdout).into_owned(), - stderr, - } - } - - pub fn exec_success(&self, command: &str) -> CommandResult { - let result = self.exec(command); - result.assert_success(&format!("testcontainer exec `{command}`")); - result - } - - pub fn exec_tnmsc(&self, args: &[&str]) -> CommandResult { - self.exec(&tnmsc_command(args)) - } - - pub fn exec_tnmsc_success(&self, args: &[&str]) -> CommandResult { - let command = tnmsc_command(args); - let result = self.exec(&command); - result.assert_success(&command); - result - } - - pub fn cat(&self, path: &str) -> CommandResult { - self.exec(&format!("cat {}", quote_shell(path))) - } - - pub fn cat_success(&self, path: &str) -> CommandResult { - let result = self.cat(path); - result.assert_success(&format!("read {path}")); - result - } - - pub fn setup(&self) -> ContainerSetup<'_> { - ContainerSetup::new(self) - } -} - -pub struct ContainerSetup<'a> { - container: &'a TestContainer, - lines: Vec, - heredoc_index: usize, -} - -impl<'a> ContainerSetup<'a> { - fn new(container: &'a TestContainer) -> Self { - Self { - container, - lines: Vec::new(), - heredoc_index: 0, - } - } - - pub fn mkdir_p(mut self, path: &str) -> Self { - self.lines.push(format!("mkdir -p {}", quote_shell(path))); - self - } - - pub fn write_file(mut self, path: &str, content: &str) -> Self { - let delimiter = format!("__TNMSC_{}__", self.heredoc_index); - self.heredoc_index += 1; - self.lines.push(format!( - "cat <<'{delimiter}' > {path}\n{content}\n{delimiter}" - )); - self - } - - pub fn rm_rf(mut self, path: &str) -> Self { - self.lines.push(format!("rm -rf {}", quote_shell(path))); - self - } - - pub fn exec(self, context: &str) -> CommandResult { - let script = self.lines.join("\n"); - let result = self.container.exec(&script); - result.assert_success(context); - result - } -} - -pub fn integration_tests_dir() -> PathBuf { - PathBuf::from(env!("CARGO_MANIFEST_DIR")) -} - -pub fn cli_manifest_dir() -> PathBuf { - integration_tests_dir() - .parent() - .expect("integration test crate should live under cli/") - .to_path_buf() -} - -pub fn workspace_root() -> PathBuf { - cli_manifest_dir() - .parent() - .expect("cli crate should live under the workspace root") - .to_path_buf() -} - -pub fn integration_tmp_root() -> PathBuf { - integration_tests_dir().join(".tmp") -} - -pub fn run_tnmsc(args: &[&str], cwd: &Path) -> CommandResult { - run_tnmsc_with_env(args, cwd, &[]) -} - -pub fn run_tnmsc_with_env(args: &[&str], cwd: &Path, envs: &[(&str, &str)]) -> CommandResult { - let mut command = Command::new("cargo"); - command - .args(["run", "-p", "tnmsc", "--bin", "tnmsc", "--"]) - .args(args) - .current_dir(cwd); - for (key, value) in envs { - command.env(key, value); - } - - command_output(&mut command, "cargo run -p tnmsc --bin tnmsc") -} - -pub fn run_packaged_tnmsc_with_env( - args: &[&str], - cwd: &Path, - envs: &[(&str, &str)], -) -> CommandResult { - let mut command = Command::new(release_binary_path()); - command.args(args).current_dir(cwd); - for (key, value) in envs { - command.env(key, value); - } - - command_output(&mut command, "target/debug/tnmsc") -} - -pub fn run_program_inherit(program: &str, args: &[&str], cwd: &Path) -> bool { - let mut command; - #[cfg(unix)] - { - command = Command::new("sh"); - command.args(["-c", &format!("{} {}", program, args.join(" "))]); - command.env_clear(); - if let Ok(path) = std::env::var("PATH") { - command.env("PATH", path); - } - if let Ok(home) = std::env::var("HOME") { - command.env("HOME", home); - } - } - #[cfg(windows)] - { - command = Command::new("cmd"); - command.args(["/C", &format!("{} {}", program, args.join(" "))]); - } - command.current_dir(cwd); - command.stdin(std::process::Stdio::null()); - command.stdout(std::process::Stdio::inherit()); - command.stderr(std::process::Stdio::inherit()); - - match command.status() { - Ok(status) => status.success(), - Err(error) => { - eprintln!("failed to run {program}: {error}"); - false - } - } -} - -pub fn run_program(program: &str, args: &[&str], cwd: &Path) -> CommandResult { - let mut command; - #[cfg(unix)] - { - command = Command::new("sh"); - command.args(["-c", &format!("{} {}", program, args.join(" "))]); - command.env_clear(); - if let Ok(path) = std::env::var("PATH") { - command.env("PATH", path); - } - if let Ok(home) = std::env::var("HOME") { - command.env("HOME", home); - } - } - #[cfg(windows)] - { - command = Command::new("cmd"); - command.args(["/C", &format!("{} {}", program, args.join(" "))]); - } - command.current_dir(cwd); - - command_output(&mut command, program) -} - -pub fn current_package_version() -> &'static str { - env!("CARGO_PKG_VERSION") -} - -pub fn pnpm_version() -> &'static str { - PNPM_VERSION.get_or_init(|| { - let package_json_path = workspace_root().join("package.json"); - let raw = fs::read_to_string(&package_json_path) - .unwrap_or_else(|error| panic!("failed to read {}: {error}", package_json_path.display())); - let parsed: serde_json::Value = serde_json::from_str(&raw) - .unwrap_or_else(|error| panic!("failed to parse {}: {error}", package_json_path.display())); - let package_manager = parsed - .get("packageManager") - .and_then(|value| value.as_str()) - .unwrap_or("pnpm@latest"); - - package_manager - .rsplit_once('@') - .map(|(_, version)| version.to_string()) - .unwrap_or_else(|| "latest".to_string()) - }) -} - -pub fn ensure_release_binary() { - RELEASE_BINARY_BUILT.get_or_init(|| { - eprintln!("[tnmsc-integrate-tests] compiling debug binary (cargo build -p tnmsc)..."); - let start = std::time::Instant::now(); - let status = run_program_inherit("cargo", &["build", "-p", "tnmsc"], &workspace_root()); - eprintln!( - "[tnmsc-integrate-tests] debug binary compilation finished in {:.2}s", - start.elapsed().as_secs_f64() - ); - assert!(status, "cargo build -p tnmsc failed"); - }); - - let binary = release_binary_path(); - assert!(binary.is_file(), "missing binary at {}", binary.display()); -} - -pub fn ensure_release_test_api_binary() { - RELEASE_TEST_API_BINARY_BUILT.get_or_init(|| { - eprintln!("[tnmsc-integrate-tests] compiling test-api debug binary (cargo build -p tnmsc --bin tnmsc-test-api)..."); - let start = std::time::Instant::now(); - let status = run_program_inherit( - "cargo", - &[ - "build", - "-p", - "tnmsc", - "--bin", - "tnmsc-test-api", - ], - &workspace_root(), - ); - eprintln!( - "[tnmsc-integrate-tests] test-api debug binary compilation finished in {:.2}s", - start.elapsed().as_secs_f64() - ); - assert!(status, "cargo build -p tnmsc --bin tnmsc-test-api failed"); - }); - - let binary = release_test_api_binary_path(); - assert!( - binary.is_file(), - "missing test API binary at {}", - binary.display() - ); -} - -pub fn release_binary_path() -> PathBuf { - let binary_name = if cfg!(windows) { "tnmsc.exe" } else { "tnmsc" }; - workspace_root() - .join("target") - .join("debug") - .join(binary_name) -} - -pub fn release_test_api_binary_path() -> PathBuf { - let binary_name = if cfg!(windows) { - "tnmsc-test-api.exe" - } else { - "tnmsc-test-api" - }; - workspace_root() - .join("target") - .join("debug") - .join(binary_name) -} - -fn cached_linux_binary_path() -> PathBuf { - workspace_root() - .join("target") - .join("debug") - .join("tnmsc-linux-x64-gnu") -} - -pub fn create_staged_package_root() -> StagedPackageRoot { - let cli_dir = cli_manifest_dir(); - assert!( - cli_dir.exists(), - "CLI manifest directory does not exist: {}", - cli_dir.display() - ); - assert!( - cli_dir.join("package.json").is_file(), - "CLI package.json not found at {}", - cli_dir.join("package.json").display() - ); - - let temp_dir = TestDir::new("tnmsc-packaging"); - let package_root = temp_dir.path().join("cli"); - - copy_file( - &cli_manifest_dir().join("package.json"), - &package_root.join("package.json"), - ); - copy_dir_all(&cli_manifest_dir().join("bin"), &package_root.join("bin")); - copy_dir_all( - &cli_manifest_dir().join("schema"), - &package_root.join("schema"), - ); - copy_file( - &cli_manifest_dir() - .join("npm") - .join("linux-x64-gnu") - .join("package.json"), - &package_root - .join("npm") - .join("linux-x64-gnu") - .join("package.json"), - ); - - let linux_binary = package_root - .join("npm") - .join("linux-x64-gnu") - .join("bin") - .join("tnmsc"); - let test_api_binary = release_test_api_binary_path(); - - StagedPackageRoot { - _temp_dir: temp_dir, - package_root, - linux_binary, - test_api_binary, - } -} - -pub fn pack_cli_artifacts() -> Option<&'static PackedArtifacts> { - Some(PACKED_CLI_ARTIFACTS.get_or_init(pack_cli_artifacts_once)) -} - -fn pack_cli_artifacts_once() -> PackedArtifacts { - eprintln!("[tnmsc-integrate-tests] packing CLI artifacts..."); - let total_start = std::time::Instant::now(); - - ensure_release_binary(); - ensure_release_test_api_binary(); - - let temp_dir = TestDir::new("tnmsc-packed-artifacts"); - let staged = create_staged_package_root(); - let package_root = staged.package_root.to_string_lossy().into_owned(); - let workspace_root_dir = workspace_root().to_string_lossy().into_owned(); - - eprintln!("[tnmsc-integrate-tests] running assemble-npm..."); - let assemble = run_packaged_tnmsc_with_env( - &["assemble-npm", "--profile", "debug"], - &workspace_root(), - &[ - ("TNMSC_NPM_PACKAGE_ROOT", package_root.as_str()), - ("TNMSC_WORKSPACE_ROOT", workspace_root_dir.as_str()), - ], - ); - assemble.assert_success("tnmsc assemble-npm for staged package root"); - - if !staged.linux_binary.is_file() { - let cached = cached_linux_binary_path(); - if cached.is_file() { - eprintln!( - "[tnmsc-integrate-tests] using cached linux-x64-gnu binary from {}", - cached.display(), - ); - fs::copy(&cached, &staged.linux_binary).unwrap_or_else(|error| { - panic!( - "failed to copy cached linux binary from {} to {}: {error}", - cached.display(), - staged.linux_binary.display() - ) - }); - } else { - eprintln!( - "[tnmsc-integrate-tests] linux-x64-gnu binary not found at {} — attempting cross-compilation with cargo-zigbuild...", - staged.linux_binary.display(), - ); - let cross_start = std::time::Instant::now(); - - let cross_ok = run_program_inherit( - "cargo", - &[ - "zigbuild", - "--target", - "x86_64-unknown-linux-gnu", - "-p", - "tnmsc", - ], - &workspace_root(), - ); - - if !cross_ok { - panic!( - "cross-compilation to x86_64-unknown-linux-gnu failed. \ - ensure zig is installed (e.g., scoop install zig) and cargo-zigbuild is installed (cargo install cargo-zigbuild)." - ); - } - eprintln!( - "[tnmsc-integrate-tests] cross-compilation finished in {:.2}s", - cross_start.elapsed().as_secs_f64() - ); - - let assemble_cross = run_packaged_tnmsc_with_env( - &["assemble-npm", "--profile", "debug"], - &workspace_root(), - &[ - ("TNMSC_NPM_PACKAGE_ROOT", package_root.as_str()), - ("TNMSC_WORKSPACE_ROOT", workspace_root_dir.as_str()), - ], - ); - assemble_cross.assert_success("tnmsc assemble-npm after cross-compilation"); - - assert!( - staged.linux_binary.is_file(), - "linux-x64-gnu binary still missing after cross-compilation at {}", - staged.linux_binary.display() - ); - - // Persist cross-compiled binary for future test runs - if let Err(error) = fs::copy(&staged.linux_binary, &cached) { - eprintln!( - "[tnmsc-integrate-tests] warning: failed to cache linux binary to {}: {error}", - cached.display() - ); - } - } - } - - let linux_tarball = pack_package( - &staged.package_root.join("npm").join("linux-x64-gnu"), - temp_dir.path(), - "linux-x64-gnu", - ); - rewrite_main_package_json( - &staged.package_root.join("package.json"), - "file:/artifacts/linux-x64-gnu.tgz", - ); - let cli_tarball = pack_package(&staged.package_root, temp_dir.path(), "cli"); - - eprintln!( - "[tnmsc-integrate-tests] artifact packing finished in {:.2}s", - total_start.elapsed().as_secs_f64() - ); - - PackedArtifacts { - _temp_dir: temp_dir, - cli_tarball, - linux_tarball, - test_api_binary: staged.test_api_binary, - } -} - -pub fn install_packaged_cli_container() -> Option { - let artifacts = pack_cli_artifacts()?; - let container = TestContainer::start(artifacts); - let install_command = format!("npm install -g {}", quote_shell("/artifacts/cli.tgz")); - let result = container.exec_with_retries_and_timeout(&install_command, 3, 2000, 120); - result.assert_success(&format!( - "install tnmsc globally (attempted up to 3 times): {}", - install_command - )); - Some(container) -} - -pub fn tnmsc_command(args: &[&str]) -> String { - let mut command = String::from("tnmsc"); - for arg in args { - command.push(' '); - command.push_str("e_shell(arg)); - } - command -} - -pub fn quote_shell(value: &str) -> String { - format!("'{}'", value.replace('\'', "'\"'\"'")) -} - -fn pack_package(package_dir: &Path, target_root: &Path, name: &str) -> PathBuf { - assert!( - package_dir.exists(), - "package directory does not exist: {}", - package_dir.display() - ); - - let pack_destination = target_root.join(name); - fs::create_dir_all(&pack_destination).unwrap_or_else(|error| { - panic!( - "failed to create pack destination {}: {error}", - pack_destination.display() - ) - }); - - let package_dir = package_dir.to_string_lossy().into_owned(); - let pack_destination = pack_destination.to_string_lossy().into_owned(); - let result = run_program( - "pnpm", - &[ - "-C", - &package_dir, - "pack", - "--pack-destination", - &pack_destination, - ], - &workspace_root(), - ); - result.assert_success(&format!("pnpm pack for {}", package_dir)); - - let mut tarballs = fs::read_dir(&pack_destination) - .unwrap_or_else(|error| panic!("failed to read {}: {error}", pack_destination)) - .filter_map(|entry| entry.ok()) - .map(|entry| entry.path()) - .filter(|path| path.extension().and_then(OsStr::to_str) == Some("tgz")) - .collect::>(); - - tarballs.sort(); - assert!( - tarballs.len() == 1, - "expected exactly one tarball in {}, found {}", - pack_destination, - tarballs.len() - ); - - tarballs.remove(0) -} - -fn rewrite_main_package_json(path: &Path, platform_dependency: &str) { - let raw = fs::read_to_string(path) - .unwrap_or_else(|error| panic!("failed to read {}: {error}", path.display())); - let mut parsed: serde_json::Value = serde_json::from_str(&raw) - .unwrap_or_else(|error| panic!("failed to parse {}: {error}", path.display())); - - let object = parsed.as_object_mut().unwrap_or_else(|| { - panic!( - "expected top-level package.json object at {}", - path.display() - ) - }); - object.insert( - "optionalDependencies".to_string(), - serde_json::Value::Object( - [( - PACKAGED_PLATFORM_PACKAGE.to_string(), - serde_json::Value::String(platform_dependency.to_string()), - )] - .into_iter() - .collect(), - ), - ); - - fs::write( - path, - serde_json::to_string_pretty(&parsed) - .unwrap_or_else(|error| panic!("failed to serialize {}: {error}", path.display())), - ) - .unwrap_or_else(|error| panic!("failed to write {}: {error}", path.display())); -} - -fn copy_dir_all(source: &Path, destination: &Path) { - fs::create_dir_all(destination) - .unwrap_or_else(|error| panic!("failed to create {}: {error}", destination.display())); - - for entry in fs::read_dir(source) - .unwrap_or_else(|error| panic!("failed to read {}: {error}", source.display())) - { - let entry = - entry.unwrap_or_else(|error| panic!("failed to read entry in {}: {error}", source.display())); - let file_type = entry.file_type().unwrap_or_else(|error| { - panic!( - "failed to read file type for {}: {error}", - entry.path().display() - ) - }); - let destination_path = destination.join(entry.file_name()); - - if file_type.is_dir() { - copy_dir_all(&entry.path(), &destination_path); - } else { - copy_file(&entry.path(), &destination_path); - } - } -} - -fn copy_file(source: &Path, destination: &Path) { - if let Some(parent) = destination.parent() { - fs::create_dir_all(parent) - .unwrap_or_else(|error| panic!("failed to create {}: {error}", parent.display())); - } - - fs::copy(source, destination).unwrap_or_else(|error| { - panic!( - "failed to copy {} to {}: {error}", - source.display(), - destination.display() - ) - }); -} - -fn command_output(command: &mut Command, label: &str) -> CommandResult { - match command.output() { - Ok(output) => decode_output(output), - Err(error) => CommandResult { - status: 1, - stdout: String::new(), - stderr: format!("failed to run {label}: {error}"), - }, - } -} - -fn decode_output(output: Output) -> CommandResult { - CommandResult { - status: output.status.code().unwrap_or(1), - stdout: String::from_utf8_lossy(&output.stdout).into_owned(), - stderr: String::from_utf8_lossy(&output.stderr).into_owned(), - } -} - -fn shell_script(command: &str) -> String { - [ - "set +e", - "export HOME=/root", - "export PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin", - "mkdir -p /artifacts", - "cd /", - command, - "status=$?", - &format!("printf '{}%s\\n' \"$status\" >&2", EXIT_MARKER), - "exit 0", - ] - .join("\n") -} - -fn extract_exit_code(stderr: &str) -> Option<(i32, String)> { - let mut lines = stderr.lines().map(str::to_string).collect::>(); - let marker_index = lines - .iter() - .rposition(|line| line.starts_with(EXIT_MARKER))?; - let marker = lines.remove(marker_index); - let exit_code = marker[EXIT_MARKER.len()..].parse::().ok()?; - let cleaned = if lines.is_empty() { - String::new() - } else { - let mut joined = lines.join("\n"); - joined.push('\n'); - joined - }; - - Some((exit_code, cleaned)) -} diff --git a/cli/integrate-tests/tests/clean_blackbox.rs b/cli/integrate-tests/tests/clean_blackbox.rs deleted file mode 100644 index a8f9540d..00000000 --- a/cli/integrate-tests/tests/clean_blackbox.rs +++ /dev/null @@ -1,100 +0,0 @@ -//! 验证 `tnmsc clean` 命令: 递归删除空项目目录和孤立的 Agent 文件 (无对应 agt.mdx 的 AGENTS.md/CLAUDE.md)。 -//! -//! **断言**: -//! - 工作区下的空项目目录被移除 (递归清理) -//! - 无对应 agt.mdx 时,孤立的 AGENTS.md 和 CLAUDE.md 被删除 (孤立文件清理) - -use tnmsc_integrate_tests::install_packaged_cli_container; - -#[test] -fn clean_keeps_empty_project_directories_without_generated_outputs() { - let container = install_packaged_cli_container().unwrap(); - - container - .setup() - .mkdir_p("/workspace/demo/project-a/subdir/empty") - .mkdir_p("/workspace/demo/project-b") - .mkdir_p("/workspace/demo/project-c/nested/empty") - .mkdir_p("/root/.aindex") - .write_file( - "/root/.aindex/.tnmsc.json", - r#"{ - "workspaceDir": "/workspace/demo", - "plugins": {} -}"#, - ) - .exec("setup clean test workspace"); - - let ls_before = container.exec("ls -la /workspace/demo"); - ls_before.assert_success("list directories before clean"); - assert!(ls_before.stdout.contains("project-a")); - assert!(ls_before.stdout.contains("project-b")); - assert!(ls_before.stdout.contains("project-c")); - - let clean = container.exec("cd /workspace/demo && tnmsc clean"); - clean.assert_success("tnmsc clean"); - - let check_empty = container.exec("find /workspace/demo -type d -empty | sort"); - check_empty.assert_success("find empty directories after clean"); - assert!( - check_empty.stdout.contains("/workspace/demo/project-a"), - "empty project-a should remain when there are no generated outputs.\nstdout:\n{}", - check_empty.stdout - ); - assert!( - check_empty.stdout.contains("/workspace/demo/project-b"), - "empty project-b should remain when there are no generated outputs.\nstdout:\n{}", - check_empty.stdout - ); - assert!( - check_empty.stdout.contains("/workspace/demo/project-c"), - "empty project-c should remain when there are no generated outputs.\nstdout:\n{}", - check_empty.stdout - ); -} - -#[test] -fn clean_removes_stale_agents_and_claude_files() { - let container = install_packaged_cli_container().unwrap(); - - container - .setup() - .mkdir_p("/workspace/demo/project-a") - .mkdir_p("/workspace/demo/project-b") - .mkdir_p("/root/.aindex") - .write_file( - "/root/.aindex/.tnmsc.json", - r#"{ - "workspaceDir": "/workspace/demo", - "plugins": {} -}"#, - ) - .write_file("/workspace/demo/project-a/AGENTS.md", "Stale agents file") - .write_file("/workspace/demo/project-a/CLAUDE.md", "Stale claude file") - .write_file( - "/workspace/demo/project-b/AGENTS.md", - "Another stale agents file", - ) - .exec("setup clean stale files workspace"); - - let cat_agents_a = container.cat_success("/workspace/demo/project-a/AGENTS.md"); - assert!(cat_agents_a.stdout.contains("Stale agents file")); - - let cat_claude_a = container.cat_success("/workspace/demo/project-a/CLAUDE.md"); - assert!(cat_claude_a.stdout.contains("Stale claude file")); - - let cat_agents_b = container.cat_success("/workspace/demo/project-b/AGENTS.md"); - assert!(cat_agents_b.stdout.contains("Another stale agents file")); - - let clean = container.exec("cd /workspace/demo && tnmsc clean"); - clean.assert_success("tnmsc clean"); - - let ls_project_a = container.exec("ls -la /workspace/demo/project-a"); - ls_project_a.assert_success("list project-a after clean"); - assert!(!ls_project_a.stdout.contains("AGENTS.md")); - assert!(!ls_project_a.stdout.contains("CLAUDE.md")); - - let ls_project_b = container.exec("ls -la /workspace/demo/project-b"); - ls_project_b.assert_success("list project-b after clean"); - assert!(!ls_project_b.stdout.contains("AGENTS.md")); -} diff --git a/cli/integrate-tests/tests/command_contract.rs b/cli/integrate-tests/tests/command_contract.rs deleted file mode 100644 index 7f2006a0..00000000 --- a/cli/integrate-tests/tests/command_contract.rs +++ /dev/null @@ -1,28 +0,0 @@ -//! 验证 CLI 合约: 打包后的 `tnmsc` 二进制在全局安装后暴露预期的命令界面 (子命令和版本)。 -//! -//! **断言**: -//! - `help` 输出列出所有预期的子命令 (完整性) -//! - `version` 输出与当前包版本一致 (正确性) - -use tnmsc_integrate_tests::{ - EXPECTED_SUBCOMMANDS, current_package_version, install_packaged_cli_container, -}; - -#[test] -fn packaged_cli_contract_runs_inside_testcontainer() { - let container = install_packaged_cli_container().unwrap(); - - let help = container.exec_tnmsc(&["help"]); - help.assert_success("global tnmsc help"); - for expected in EXPECTED_SUBCOMMANDS { - assert!( - help.stdout.contains(expected), - "help output should include `{expected}`.\nstdout:\n{}", - help.stdout - ); - } - - let version = container.exec_tnmsc(&["version"]); - version.assert_success("global tnmsc version"); - assert_eq!(version.stdout.trim(), current_package_version()); -} diff --git a/cli/integrate-tests/tests/install_smoke.rs b/cli/integrate-tests/tests/install_smoke.rs deleted file mode 100644 index 27da5634..00000000 --- a/cli/integrate-tests/tests/install_smoke.rs +++ /dev/null @@ -1,99 +0,0 @@ -//! 验证 `tnmsc install` 命令: 从 aindex memory 生成 CLAUDE.md 并验证必需的配置。 -//! -//! **断言**: -//! - 生成的 CLAUDE.md 包含 aindex 中的全局和工作区提示词 (memory 聚合) -//! - 缺少 workspaceDir 时 `tnmsc install` 失败并给出清晰错误 (验证) -//! - 缺少 .tnmsc.json 时 `tnmsc install` 失败并给出清晰错误 (配置文件要求) - -use tnmsc_integrate_tests::install_packaged_cli_container; - -#[test] -fn packaged_cli_install_writes_claude_memory_from_aindex() { - let container = install_packaged_cli_container().unwrap(); - - container - .setup() - .mkdir_p("/workspace/demo/aindex") - .mkdir_p("/root/.aindex") - .write_file( - "/root/.aindex/.tnmsc.json", - r#"{ - "workspaceDir": "/workspace/demo", - "plugins": { - "claudeCode": true - } -}"#, - ) - .write_file( - "/workspace/demo/aindex/global.mdx", - "Global memory from aindex", - ) - .write_file( - "/workspace/demo/aindex/workspace.mdx", - "Workspace root prompt from aindex", - ) - .exec("setup install smoke workspace"); - - let install = container.exec("cd /workspace/demo && tnmsc install"); - install.assert_success("global tnmsc install"); - - let claude = container.cat_success("/workspace/demo/CLAUDE.md"); - assert!( - claude.stdout.contains("Global memory from aindex"), - "generated CLAUDE.md should include the global memory.\nstdout:\n{}", - claude.stdout - ); - assert!( - claude.stdout.contains("Workspace root prompt from aindex"), - "generated CLAUDE.md should include the workspace prompt.\nstdout:\n{}", - claude.stdout - ); -} - -#[test] -fn packaged_cli_install_errors_when_workspace_dir_not_configured() { - let container = install_packaged_cli_container().unwrap(); - - container - .setup() - .mkdir_p("/workspace/demo/aindex") - .mkdir_p("/root/.aindex") - .write_file( - "/root/.aindex/.tnmsc.json", - r#"{ - "plugins": { - "claudeCode": true - } -}"#, - ) - .exec("setup missing workspaceDir workspace"); - - let install = container.exec("cd /workspace/demo && tnmsc install"); - install.assert_failure("tnmsc install without workspaceDir"); - assert!( - install.stderr.contains("workspaceDir"), - "stderr should mention workspaceDir as the missing required field.\nstdout:\n{}\nstderr:\n{}", - install.stdout, - install.stderr - ); -} - -#[test] -fn packaged_cli_install_errors_when_config_file_missing() { - let container = install_packaged_cli_container().unwrap(); - - container - .setup() - .mkdir_p("/workspace/demo/aindex") - .rm_rf("/root/.aindex") - .exec("setup missing config workspace"); - - let install = container.exec("cd /workspace/demo && tnmsc install"); - install.assert_failure("tnmsc install without config file"); - assert!( - install.stderr.contains(".tnmsc.json"), - "stderr should mention .tnmsc.json as the missing config file.\nstdout:\n{}\nstderr:\n{}", - install.stdout, - install.stderr - ); -} diff --git a/cli/integrate-tests/tests/packaging_smoke.rs b/cli/integrate-tests/tests/packaging_smoke.rs deleted file mode 100644 index dc27dd39..00000000 --- a/cli/integrate-tests/tests/packaging_smoke.rs +++ /dev/null @@ -1,90 +0,0 @@ -//! 验证 npm 打包流程: `assemble-npm` 生成的 release 二进制在全局安装后暴露正确的命令界面和插件。 -//! -//! **断言**: -//! - `assemble-npm --profile release` 生成可执行的 Linux 二进制 (构建产物) -//! - 二进制具有可执行权限 (Unix 权限正确性) -//! - 全局安装的 `tnmsc help` 列出所有预期的子命令 (命令界面) -//! - 主 npm 包声明正确的平台 optional dependency (包依赖布局) -//! - 平台包结构正确 (npm 包布局) - -use std::fs; - -#[cfg(unix)] -use std::os::unix::fs::PermissionsExt; - -use tnmsc_integrate_tests::{ - EXPECTED_SUBCOMMANDS, PACKAGED_PLATFORM_PACKAGE, create_staged_package_root, - install_packaged_cli_container, run_tnmsc_with_env, workspace_root, -}; - -#[test] -fn packaging_smoke_covers_release_binary_and_global_install() { - let staged = create_staged_package_root(); - let package_root = staged.package_root.to_string_lossy().into_owned(); - let workspace_root_dir = workspace_root().to_string_lossy().into_owned(); - - let assemble = run_tnmsc_with_env( - &["assemble-npm", "--profile", "release"], - &workspace_root(), - &[ - ("TNMSC_NPM_PACKAGE_ROOT", package_root.as_str()), - ("TNMSC_WORKSPACE_ROOT", workspace_root_dir.as_str()), - ], - ); - assemble.assert_success("tnmsc assemble-npm --profile release"); - - assert!( - staged.linux_binary.is_file(), - "expected hydrated linux binary at {}", - staged.linux_binary.display() - ); - - #[cfg(unix)] - { - let mode = fs::metadata(&staged.linux_binary) - .unwrap_or_else(|error| panic!("failed to stat {}: {error}", staged.linux_binary.display())) - .permissions() - .mode(); - assert_ne!( - mode & 0o111, - 0, - "expected {} to be executable, mode was {:o}", - staged.linux_binary.display(), - mode - ); - } - - let container = install_packaged_cli_container().unwrap(); - - let help = container.exec_tnmsc(&["help"]); - help.assert_success("global tnmsc help"); - for expected in EXPECTED_SUBCOMMANDS { - assert!( - help.stdout.contains(expected), - "global help output should include `{expected}`.\nstdout:\n{}", - help.stdout - ); - } - - let main_package_json = fs::read_to_string(staged.package_root.join("package.json")) - .unwrap_or_else(|error| panic!("failed to read staged main package.json: {error}")); - assert!( - main_package_json.contains(PACKAGED_PLATFORM_PACKAGE), - "staged main package.json should declare the packaged platform dependency.\ncontent:\n{}", - main_package_json - ); - - container.exec_success( - r#" -MAIN_PACKAGE_JSON="$(find -L /usr/local/lib/node_modules -path '*/@truenine/memory-sync-cli/package.json' -print -quit)" -PLATFORM_PACKAGE_JSON="$(find -L /usr/local/lib/node_modules -path '*/@truenine/memory-sync-cli-linux-x64-gnu/package.json' -print -quit)" -test -n "$MAIN_PACKAGE_JSON" -test -n "$PLATFORM_PACKAGE_JSON" -test -f "$(dirname "$MAIN_PACKAGE_JSON")/bin/tnmsc.js" -test -x "$(dirname "$PLATFORM_PACKAGE_JSON")/bin/tnmsc" -test -x "$(command -v tnmsc)" -grep -q '"@truenine/memory-sync-cli-linux-x64-gnu"' "$MAIN_PACKAGE_JSON" -test ! -e "$(dirname "$MAIN_PACKAGE_JSON")/dist/index.mjs" -"#, - ); -} diff --git a/cli/integrate-tests/tests/project_config_ts.rs b/cli/integrate-tests/tests/project_config_ts.rs deleted file mode 100644 index 5304e21f..00000000 --- a/cli/integrate-tests/tests/project_config_ts.rs +++ /dev/null @@ -1,189 +0,0 @@ -use serde_json::Value; -use tnmsc_integrate_tests::install_packaged_cli_container; - -fn read_aindex_resolvers(container: &tnmsc_integrate_tests::TestContainer) -> Value { - let result = container - .exec("/test-bin/tnmsc-test-api collect-aindex-resolvers --workspace-dir '/workspace/demo'"); - result.assert_success("collect-aindex-resolvers"); - serde_json::from_str(&result.stdout).expect("resolver output should be valid JSON") -} - -#[test] -fn packaged_cli_collects_project_config_ts_for_supported_series_and_ignores_workspace_root_mirrors() -{ - let container = install_packaged_cli_container().unwrap(); - - container - .setup() - .mkdir_p("/workspace/demo/aindex/app/app-a") - .mkdir_p("/workspace/demo/aindex/arch/arch-a") - .mkdir_p("/workspace/demo/aindex/softwares/software-a") - .mkdir_p("/workspace/demo/aindex/ext/ext-a") - .mkdir_p("/workspace/demo/app/app-a") - .mkdir_p("/workspace/demo/arch/arch-a") - .mkdir_p("/workspace/demo/softwares/software-a") - .mkdir_p("/workspace/demo/ext/ext-a") - .write_file( - "/workspace/demo/aindex/app/app-a/project.config.ts", - r#" -const ctx = globalThis.__tnmsContext ?? {}; -console.log(JSON.stringify({ - source: 'aindex', - projectName: ctx.projectName, - seriesName: ctx.seriesName, - marker: 'app-ok' -})); -"#, - ) - .write_file( - "/workspace/demo/aindex/arch/arch-a/project.config.ts", - r#" -const ctx = globalThis.__tnmsContext ?? {}; -console.log(JSON.stringify({ - source: 'aindex', - projectName: ctx.projectName, - seriesName: ctx.seriesName, - marker: 'arch-ok' -})); -"#, - ) - .write_file( - "/workspace/demo/aindex/softwares/software-a/project.config.ts", - r#" -const ctx = globalThis.__tnmsContext ?? {}; -console.log(JSON.stringify({ - source: 'aindex', - projectName: ctx.projectName, - seriesName: ctx.seriesName, - marker: 'software-ok' -})); -"#, - ) - .write_file( - "/workspace/demo/aindex/ext/ext-a/project.config.ts", - r#" -const ctx = globalThis.__tnmsContext ?? {}; -console.log(JSON.stringify({ - source: 'aindex', - projectName: ctx.projectName, - seriesName: ctx.seriesName, - marker: 'ext-ok' -})); -"#, - ) - .write_file( - "/workspace/demo/app/app-a/project.config.ts", - r#"console.log(JSON.stringify({ source: 'workspace-root', marker: 'wrong-app' }));"#, - ) - .write_file( - "/workspace/demo/arch/arch-a/project.config.ts", - r#"console.log(JSON.stringify({ source: 'workspace-root', marker: 'wrong-arch' }));"#, - ) - .write_file( - "/workspace/demo/softwares/software-a/project.config.ts", - r#"console.log(JSON.stringify({ source: 'workspace-root', marker: 'wrong-software' }));"#, - ) - .write_file( - "/workspace/demo/ext/ext-a/project.config.ts", - r#"console.log(JSON.stringify({ source: 'workspace-root', marker: 'wrong-ext' }));"#, - ) - .exec("setup project.config.ts positive workspace"); - - let parsed = read_aindex_resolvers(&container); - let projects = parsed["workspace"]["projects"] - .as_array() - .expect("projects should be an array"); - - let expected = [ - ("app", "app-a", "app-ok"), - ("arch", "arch-a", "arch-ok"), - ("softwares", "software-a", "software-ok"), - ("ext", "ext-a", "ext-ok"), - ]; - - for (series, project, marker) in expected { - let item = projects - .iter() - .find(|entry| { - entry["projectType"].as_str() == Some(series) && entry["name"].as_str() == Some(project) - }) - .unwrap_or_else(|| panic!("missing project {series}:{project}")); - - assert_eq!(item["projectConfig"]["source"], "aindex"); - assert_eq!(item["projectConfig"]["projectName"], project); - assert_eq!(item["projectConfig"]["seriesName"], series); - assert_eq!(item["projectConfig"]["marker"], marker); - } -} - -#[test] -fn packaged_cli_reports_invalid_project_config_ts_without_failing_collection() { - let container = install_packaged_cli_container().unwrap(); - - container - .setup() - .mkdir_p("/workspace/demo/aindex/app/app-a") - .mkdir_p("/workspace/demo/aindex/ext/ext-a") - .write_file( - "/workspace/demo/aindex/app/app-a/project.config.ts", - r#" -const ctx = globalThis.__tnmsContext ?? {}; -console.log(JSON.stringify({ - source: 'aindex', - projectName: ctx.projectName, - seriesName: ctx.seriesName, - marker: 'ok' -})); -"#, - ) - .write_file( - "/workspace/demo/aindex/ext/ext-a/project.config.ts", - "console.log('{ invalid json');", - ) - .exec("setup invalid project.config.ts workspace"); - - let parsed = read_aindex_resolvers(&container); - let projects = parsed["workspace"]["projects"] - .as_array() - .expect("projects should be an array"); - let ext_project = projects - .iter() - .find(|entry| { - entry["projectType"].as_str() == Some("ext") && entry["name"].as_str() == Some("ext-a") - }) - .expect("missing ext project"); - - assert!( - ext_project["projectConfig"].is_null(), - "invalid project.config.ts should not populate projectConfig" - ); - - let diagnostics = parsed["diagnostics"] - .as_array() - .expect("diagnostics should be an array"); - assert!( - diagnostics - .iter() - .any(|diagnostic| diagnostic["code"] == "AINDEX_PROJECT_CONFIG_TS_INVALID"), - "invalid project.config.ts should emit AINDEX_PROJECT_CONFIG_TS_INVALID diagnostic" - ); -} - -#[test] -fn packaged_tnmsc_does_not_expose_aindex_resolver_test_subcommand() { - let container = install_packaged_cli_container().unwrap(); - let result = container.exec_tnmsc(&[ - "collect-aindex-resolvers", - "--workspace-dir", - "/workspace/demo", - ]); - - result.assert_failure("packaged tnmsc should not expose collect-aindex-resolvers"); - assert!( - result - .stderr - .contains("unrecognized subcommand 'collect-aindex-resolvers'"), - "unexpected stderr:\n{}", - result.stderr - ); -} diff --git a/cli/integrate-tests/tests/proxy_mapping.rs b/cli/integrate-tests/tests/proxy_mapping.rs deleted file mode 100644 index 466dd60d..00000000 --- a/cli/integrate-tests/tests/proxy_mapping.rs +++ /dev/null @@ -1,148 +0,0 @@ -//! 验证代理映射逻辑: aindex/public 下的路径通过前缀转换规则映射到原始的 dotfile 名称。 -//! -//! **断言**: -//! - `install` 使用转换后的路径读取 aindex/public 中的文件 (文件解析) -//! - 代理前缀规则正确转换 dotfile 路径 (.git/ -> ____.git/ 等) (路径映射正确性) - -use tnmsc_integrate_tests::install_packaged_cli_container; - -#[test] -fn packaged_cli_proxy_mapping_reads_public_files_via_transformed_paths() { - let container = install_packaged_cli_container().unwrap(); - - container - .setup() - .mkdir_p("/workspace/demo/aindex") - .mkdir_p("/workspace/demo/aindex/public/____.git/info") - .mkdir_p("/workspace/demo/aindex/public/____.zed") - .mkdir_p("/workspace/demo/aindex/public/____vscode") - .mkdir_p("/workspace/demo/aindex/public/____idea") - .mkdir_p("/root/.aindex") - .write_file( - "/root/.aindex/.tnmsc.json", - r#"{ - "workspaceDir": "/workspace/demo", - "plugins": { - "claudeCode": true - } -}"#, - ) - .write_file( - "/workspace/demo/aindex/global.mdx", - "Global memory from aindex", - ) - .write_file( - "/workspace/demo/aindex/workspace.mdx", - "Workspace root prompt from aindex", - ) - .write_file( - "/workspace/demo/aindex/public/____.git/info/exclude", - "# git exclude patterns\nCLAUDE.md", - ) - .write_file( - "/workspace/demo/aindex/public/____.zed/settings.json", - r#"{ - "tab_size": 2, - "format_on_save": false -}"#, - ) - .write_file( - "/workspace/demo/aindex/public/____vscode/settings.json", - r#"{ - "editor.formatOnSave": false, - "editor.tabSize": 2 -}"#, - ) - .write_file( - "/workspace/demo/aindex/public/____idea/.gitignore", - "*\n!.gitignore", - ) - .write_file( - "/workspace/demo/aindex/public/____editorconfig", - "root = true\n\n[*]\nindent_style = space\nindent_size = 2", - ) - .write_file( - "/workspace/demo/aindex/public/____gitignore", - "node_modules/\ndist/\n.tmp/", - ) - .write_file( - "/workspace/demo/aindex/public/____aiignore", - ".claude/\n.cursor/", - ) - .write_file( - "/workspace/demo/aindex/public/____warpindexignore", - "CLAUDE.md", - ) - .exec("setup proxy mapping workspace"); - - let install = container.exec("cd /workspace/demo && tnmsc install"); - install.assert_success("tnmsc install with aindex/public proxy paths"); -} - -#[test] -fn packaged_cli_proxy_mapping_prefix_rules_are_correct() { - let container = install_packaged_cli_container().unwrap(); - - let proxy_script = r#" -const prefixRules = [ - { match: '.git/', replacement: (p) => p.replace(/^\.git\//, '____.git/') }, - { match: '.zed/', replacement: (p) => p.replace(/^\.zed\//, '____.zed/') }, - { match: '.idea/', replacement: (p) => p.replace(/^\.idea\//, '____idea/') }, - { match: '.vscode/', replacement: (p) => p.replace(/^\.vscode\//, '____vscode/') }, -]; - -function proxy(logicalPath) { - const normalized = logicalPath.replaceAll('\\', '/'); - for (const rule of prefixRules) { - if (normalized.startsWith(rule.match)) { - return rule.replacement(normalized); - } - } - if (!normalized.startsWith('.')) return normalized; - return normalized.replace(/^\.([^/\\]+)/, '____$1'); -} - -const tests = [ - ['.git/info/exclude', '____.git/info/exclude'], - ['.git/HEAD', '____.git/HEAD'], - ['.zed/settings.json', '____.zed/settings.json'], - ['.idea/.gitignore', '____idea/.gitignore'], - ['.idea/codeStyles/Project.xml', '____idea/codeStyles/Project.xml'], - ['.vscode/settings.json', '____vscode/settings.json'], - ['.vscode/extensions.json', '____vscode/extensions.json'], - ['.editorconfig', '____editorconfig'], - ['.gitignore', '____gitignore'], - ['.aiignore', '____aiignore'], - ['.warpindexignore', '____warpindexignore'], - ['plain/path.txt', 'plain/path.txt'], -]; - -let passed = 0; -let failed = 0; -for (const [input, expected] of tests) { - const actual = proxy(input); - if (actual === expected) { - passed++; - } else { - console.error(`FAIL: proxy("${input}") = "${actual}", expected "${expected}"`); - failed++; - } -} -console.log(`proxy prefix rules: ${passed} passed, ${failed} failed`); -if (failed > 0) process.exit(1); -"#; - - container - .setup() - .write_file("/tmp/test_proxy.mjs", proxy_script) - .exec("write proxy test script"); - - let result = container.exec("node --experimental-strip-types /tmp/test_proxy.mjs"); - result.assert_success("proxy prefix rule verification"); - assert!( - result.stdout.contains("0 failed"), - "all proxy prefix rules should pass.\nstdout:\n{}\nstderr:\n{}", - result.stdout, - result.stderr - ); -} diff --git a/cli/integrate-tests/tests/proxy_runtime_locations.rs b/cli/integrate-tests/tests/proxy_runtime_locations.rs deleted file mode 100644 index 3c7cb15b..00000000 --- a/cli/integrate-tests/tests/proxy_runtime_locations.rs +++ /dev/null @@ -1,132 +0,0 @@ -use tnmsc_integrate_tests::install_packaged_cli_container; - -fn proxy_script(prefix: &str) -> String { - format!( - r#" -const ctx = globalThis.__tnmsContext ?? {{}}; -const logicalPath = String(ctx.logicalPath ?? '').replaceAll('\\', '/'); -console.log('{prefix}/' + logicalPath); -"# - ) -} - -#[test] -fn packaged_cli_resolves_proxy_ts_from_all_supported_aindex_locations() { - let container = install_packaged_cli_container().unwrap(); - - container - .setup() - .mkdir_p("/workspace/demo/aindex/app/proj-a") - .mkdir_p("/workspace/demo/aindex/arch/arch-a") - .mkdir_p("/workspace/demo/aindex/softwares/tool-a") - .mkdir_p("/workspace/demo/aindex/ext/ext-a") - .mkdir_p("/workspace/demo/aindex/commands/cmd-a") - .mkdir_p("/workspace/demo/aindex/skills/skill-a") - .mkdir_p("/workspace/demo/aindex/subagents/agent-a") - .write_file( - "/workspace/demo/aindex/app/proj-a/proxy.ts", - &proxy_script("app-proxy"), - ) - .write_file( - "/workspace/demo/aindex/arch/arch-a/proxy.ts", - &proxy_script("arch-proxy"), - ) - .write_file( - "/workspace/demo/aindex/softwares/tool-a/proxy.ts", - &proxy_script("software-proxy"), - ) - .write_file( - "/workspace/demo/aindex/ext/ext-a/proxy.ts", - &proxy_script("ext-proxy"), - ) - .write_file( - "/workspace/demo/aindex/commands/cmd-a/proxy.ts", - &proxy_script("command-proxy"), - ) - .write_file( - "/workspace/demo/aindex/skills/skill-a/proxy.ts", - &proxy_script("skill-proxy"), - ) - .write_file( - "/workspace/demo/aindex/subagents/agent-a/proxy.ts", - &proxy_script("subagent-proxy"), - ) - .exec("setup aindex proxy.ts locations"); - - let cases = [ - ( - "/workspace/demo/aindex/app/proj-a/proxy.ts", - "/workspace/demo/aindex/app/proj-a", - "nested/file.txt", - "app-proxy/nested/file.txt", - ), - ( - "/workspace/demo/aindex/arch/arch-a/proxy.ts", - "/workspace/demo/aindex/arch/arch-a", - "notes/today.md", - "arch-proxy/notes/today.md", - ), - ( - "/workspace/demo/aindex/softwares/tool-a/proxy.ts", - "/workspace/demo/aindex/softwares/tool-a", - "assets/logo.svg", - "software-proxy/assets/logo.svg", - ), - ( - "/workspace/demo/aindex/ext/ext-a/proxy.ts", - "/workspace/demo/aindex/ext/ext-a", - "config/settings.json", - "ext-proxy/config/settings.json", - ), - ( - "/workspace/demo/aindex/commands/cmd-a/proxy.ts", - "/workspace/demo/aindex/commands/cmd-a", - "docs/usage.md", - "command-proxy/docs/usage.md", - ), - ( - "/workspace/demo/aindex/skills/skill-a/proxy.ts", - "/workspace/demo/aindex/skills/skill-a", - "outputs/result.txt", - "skill-proxy/outputs/result.txt", - ), - ( - "/workspace/demo/aindex/subagents/agent-a/proxy.ts", - "/workspace/demo/aindex/subagents/agent-a", - "plans/spec.md", - "subagent-proxy/plans/spec.md", - ), - ]; - - for (proxy_path, root_dir, logical_path, expected) in cases { - let result = container.exec(&format!( - "/test-bin/tnmsc-test-api resolve-proxy-path --proxy-path '{}' --root-dir '{}' --logical-path '{}'", - proxy_path, root_dir, logical_path - )); - result.assert_success("resolve-proxy-path"); - assert_eq!(result.stdout.trim(), expected); - } -} - -#[test] -fn packaged_tnmsc_does_not_expose_proxy_test_subcommand() { - let container = install_packaged_cli_container().unwrap(); - let result = container.exec_tnmsc(&[ - "resolve-proxy-path", - "--proxy-path", - "/tmp/proxy.ts", - "--root-dir", - "/tmp", - "--logical-path", - "demo.txt", - ]); - - result.assert_failure("packaged tnmsc should not expose resolve-proxy-path"); - assert!( - result - .stderr - .contains("unrecognized subcommand 'resolve-proxy-path'"), - "unexpected stderr:\n{}", - result.stderr - ); -} diff --git a/cli/integrate-tests/tests/public_dir_mapped_to_projects.rs b/cli/integrate-tests/tests/public_dir_mapped_to_projects.rs deleted file mode 100644 index e42ada92..00000000 --- a/cli/integrate-tests/tests/public_dir_mapped_to_projects.rs +++ /dev/null @@ -1,219 +0,0 @@ -//! 验证 `public` 目录映射功能: aindex/public 中的文件在 `install` 时被传播到各个项目目录。 -//! -//! **断言**: -//! - VSCode settings.json 被写入项目的 .vscode/ (插件配置传播) -//! - VSCode extensions.json 被写入项目的 .vscode/ (扩展推荐) -//! - Zed settings.json 被写入项目的 .zed/ (编辑器配置映射) -//! - Git info/exclude 包含 aindex/public 中的模式 (git 忽略集成) -//! - EditorConfig 文件被写入项目根目录 (代码风格标准化) -//! - Gitignore 内容包含 public dir 中的模式 (忽略文件聚合) - -use tnmsc_integrate_tests::{TestContainer, install_packaged_cli_container}; - -fn setup_workspace_with_public_files(container: &TestContainer) { - container - .setup() - .mkdir_p("/workspace-demo/project-a") - .mkdir_p("/workspace-demo/aindex/app/project-a") - .mkdir_p("/workspace-demo/aindex/public/____.git/info") - .mkdir_p("/workspace-demo/aindex/public/____.zed") - .mkdir_p("/workspace-demo/aindex/public/____vscode") - .mkdir_p("/workspace-demo/aindex/public/____idea/codeStyles") - .mkdir_p("/workspace-demo/aindex/public/____idea") - .mkdir_p("/root/.aindex") - .write_file( - "/root/.aindex/.tnmsc.json", - r#"{ - "workspaceDir": "/workspace-demo", - "plugins": { - "claudeCode": true, - "vscode": true, - "zed": true, - "git": true, - "readme": true, - "jetbrains": true, - "jetbrainsCodeStyle": true - } -}"#, - ) - .write_file( - "/workspace-demo/aindex/global.mdx", - "Global memory from aindex", - ) - .write_file( - "/workspace-demo/aindex/workspace.mdx", - "Workspace root prompt from aindex", - ) - .write_file( - "/workspace-demo/aindex/app/project-a/agt.mdx", - "Project A memory", - ) - .write_file( - "/workspace-demo/aindex/public/____.git/info/exclude", - "# aindex managed git exclude\nCLAUDE.md\n.tmp/\nnode_modules/", - ) - .write_file( - "/workspace-demo/aindex/public/____.zed/settings.json", - r#"{ - "tab_size": 2, - "format_on_save": false -}"#, - ) - .write_file( - "/workspace-demo/aindex/public/____vscode/settings.json", - r#"{ - "editor.formatOnSave": false, - "editor.tabSize": 2, - "files.autoSave": "afterDelay" -}"#, - ) - .write_file( - "/workspace-demo/aindex/public/____vscode/extensions.json", - r#"{ - "recommendations": [] -}"#, - ) - .write_file( - "/workspace-demo/aindex/public/____idea/.gitignore", - "*\n!.gitignore\n!codeStyles/\n!codeStyles/codeStyleConfig.xml\n!codeStyles/Project.xml", - ) - .write_file( - "/workspace-demo/aindex/public/____idea/codeStyles/Project.xml", - r#" - -"#, - ) - .write_file( - "/workspace-demo/aindex/public/____idea/codeStyles/codeStyleConfig.xml", - r#" - "#, - ) - .write_file( - "/workspace-demo/aindex/public/____editorconfig", - "root = true\n\n[*]\nindent_style = space\nindent_size = 2\nend_of_line = lf\ninsert_final_newline = true", - ) - .write_file( - "/workspace-demo/aindex/public/____gitignore", - "node_modules/\ndist/\n.tmp/\n*.log", - ) - .write_file( - "/workspace-demo/aindex/public/____aiignore", - ".claude/\n.cursor/\n.kiro/\n.skills/", - ) - .write_file( - "/workspace-demo/aindex/public/____warpindexignore", - "CLAUDE.md\nAGENTS.md", - ) - .exec("setup public-dir-mapped workspace"); -} - -#[test] -fn vscode_settings_written_to_project_from_public_dir() { - let container = install_packaged_cli_container().unwrap(); - setup_workspace_with_public_files(&container); - - let install = container.exec("cd /workspace-demo && tnmsc install"); - install.assert_success("tnmsc install with vscode plugin"); - - let vscode_settings = container.cat_success("/workspace-demo/project-a/.vscode/settings.json"); - assert!( - vscode_settings.stdout.contains("editor.tabSize"), - "project-a/.vscode/settings.json should contain editor.tabSize.\nstdout:\n{}", - vscode_settings.stdout - ); - assert!( - vscode_settings.stdout.contains("formatOnSave"), - "project-a/.vscode/settings.json should contain formatOnSave.\nstdout:\n{}", - vscode_settings.stdout - ); -} - -#[test] -fn vscode_extensions_written_to_project_from_public_dir() { - let container = install_packaged_cli_container().unwrap(); - setup_workspace_with_public_files(&container); - - let install = container.exec("cd /workspace-demo && tnmsc install"); - install.assert_success("tnmsc install with vscode plugin"); - - let vscode_ext = container.cat_success("/workspace-demo/project-a/.vscode/extensions.json"); - assert!( - vscode_ext.stdout.contains("recommendations"), - "project-a/.vscode/extensions.json should contain recommendations.\nstdout:\n{}", - vscode_ext.stdout - ); -} - -#[test] -fn zed_settings_written_to_project_from_public_dir() { - let container = install_packaged_cli_container().unwrap(); - setup_workspace_with_public_files(&container); - - let install = container.exec("cd /workspace-demo && tnmsc install"); - install.assert_success("tnmsc install with zed plugin"); - - let zed_settings = container.cat_success("/workspace-demo/project-a/.zed/settings.json"); - assert!( - zed_settings.stdout.contains("tab_size"), - "project-a/.zed/settings.json should contain tab_size.\nstdout:\n{}", - zed_settings.stdout - ); -} - -#[test] -fn git_exclude_written_to_project_from_public_dir() { - let container = install_packaged_cli_container().unwrap(); - setup_workspace_with_public_files(&container); - - container.exec_success("git init /workspace-demo >/dev/null 2>&1"); - - let install = container.exec("cd /workspace-demo && tnmsc install"); - install.assert_success("tnmsc install with git plugin"); - - let git_exclude = container.cat_success("/workspace-demo/.git/info/exclude"); - assert!( - git_exclude.stdout.contains("CLAUDE.md"), - ".git/info/exclude should contain CLAUDE.md from aindex/public.\nstdout:\n{}", - git_exclude.stdout - ); -} - -#[test] -fn editorconfig_written_to_project_from_public_dir() { - let container = install_packaged_cli_container().unwrap(); - setup_workspace_with_public_files(&container); - - let install = container.exec("cd /workspace-demo && tnmsc install"); - install.assert_success("tnmsc install with editorconfig"); - - let editorconfig = container.cat_success("/workspace-demo/project-a/.editorconfig"); - assert!( - editorconfig.stdout.contains("indent_size"), - "project-a/.editorconfig should contain indent_size.\nstdout:\n{}", - editorconfig.stdout - ); - assert!( - editorconfig.stdout.contains("indent_style"), - "project-a/.editorconfig should contain indent_style.\nstdout:\n{}", - editorconfig.stdout - ); -} - -#[test] -fn gitignore_content_read_from_public_dir() { - let container = install_packaged_cli_container().unwrap(); - setup_workspace_with_public_files(&container); - - container.exec_success("git init /workspace-demo >/dev/null 2>&1"); - - let install = container.exec("cd /workspace-demo && tnmsc install"); - install.assert_success("tnmsc install with git plugin"); - - let git_exclude = container.cat_success("/workspace-demo/.git/info/exclude"); - assert!( - git_exclude.stdout.contains("node_modules/"), - "git exclude should contain gitignore content from aindex/public.\nstdout:\n{}", - git_exclude.stdout - ); -} diff --git a/mcp/integrate-tests/Cargo.toml b/mcp/integrate-tests/Cargo.toml deleted file mode 100644 index 76387ba5..00000000 --- a/mcp/integrate-tests/Cargo.toml +++ /dev/null @@ -1,13 +0,0 @@ -[package] -name = "tnmsm-integrate-tests" -publish = false -version.workspace = true -edition.workspace = true -rust-version.workspace = true -license.workspace = true -authors.workspace = true -repository.workspace = true - -[dependencies] -serde_json = { workspace = true } -testcontainers = { version = "0.27.3", features = ["blocking"] } diff --git a/mcp/integrate-tests/src/lib.rs b/mcp/integrate-tests/src/lib.rs deleted file mode 100644 index 3fc09398..00000000 --- a/mcp/integrate-tests/src/lib.rs +++ /dev/null @@ -1,516 +0,0 @@ -#![allow(dead_code)] - -use std::ffi::OsStr; -use std::fs; -use std::path::{Path, PathBuf}; -use std::process::{Command, Output}; -use std::sync::OnceLock; -use std::time::{SystemTime, UNIX_EPOCH}; - -use testcontainers::core::{ExecCommand, WaitFor}; -use testcontainers::runners::SyncRunner; -use testcontainers::{Container, GenericImage, ImageExt}; - -pub const DOCKER_IMAGE_NAME: &str = "node"; -pub const DOCKER_IMAGE_TAG: &str = "22-trixie"; -const EXIT_MARKER: &str = "__TNMSM_EXIT_CODE__="; - -static PNPM_VERSION: OnceLock = OnceLock::new(); -static RELEASE_BINARY_BUILT: OnceLock<()> = OnceLock::new(); -static REAL_ENV_SKIP_REASON: OnceLock> = OnceLock::new(); - -pub struct CommandResult { - pub status: i32, - pub stdout: String, - pub stderr: String, -} - -impl CommandResult { - pub fn assert_success(&self, context: &str) { - assert!( - self.status == 0, - "{context} should succeed.\nexit: {}\nstdout:\n{}\nstderr:\n{}", - self.status, - self.stdout, - self.stderr - ); - } -} - -pub struct TestDir { - path: PathBuf, -} - -impl TestDir { - pub fn new(prefix: &str) -> Self { - let timestamp = SystemTime::now() - .duration_since(UNIX_EPOCH) - .expect("system time should be after UNIX_EPOCH") - .as_nanos(); - - let base_dir = integration_tmp_root(); - fs::create_dir_all(&base_dir) - .unwrap_or_else(|error| panic!("failed to create {}: {error}", base_dir.display())); - - let path = base_dir.join(format!("{prefix}-{}-{timestamp}", std::process::id())); - fs::create_dir_all(&path).unwrap_or_else(|error| { - panic!( - "failed to create temp directory {}: {error}", - path.display() - ) - }); - - Self { path } - } - - pub fn path(&self) -> &Path { - &self.path - } -} - -impl Drop for TestDir { - fn drop(&mut self) { - let _ = fs::remove_dir_all(&self.path); - } -} - -pub struct StagedPackageRoot { - _temp_dir: TestDir, - pub package_root: PathBuf, - pub linux_binary: PathBuf, -} - -pub struct PackedArtifacts { - _temp_dir: TestDir, - pub mcp_tarball: PathBuf, - pub linux_tarball: PathBuf, -} - -pub struct TestContainer { - container: Container, -} - -impl TestContainer { - pub fn start(artifacts: &PackedArtifacts) -> Self { - let image = GenericImage::new(DOCKER_IMAGE_NAME, DOCKER_IMAGE_TAG) - .with_wait_for(WaitFor::seconds(1)) - .with_cmd(vec![ - "sh".to_string(), - "-lc".to_string(), - "while true; do sleep 3600; done".to_string(), - ]) - .with_copy_to("/artifacts/mcp.tgz", artifacts.mcp_tarball.as_path()) - .with_copy_to( - "/artifacts/linux-x64-gnu.tgz", - artifacts.linux_tarball.as_path(), - ); - - let container = image - .start() - .unwrap_or_else(|error| panic!("failed to start testcontainer: {error}")); - - Self { container } - } - - pub fn exec(&self, command: &str) -> CommandResult { - let script = shell_script(command); - let mut exec_result = self - .container - .exec(ExecCommand::new(vec!["sh", "-lc", &script])) - .unwrap_or_else(|error| panic!("failed to exec in testcontainer: {error}")); - - let fallback_status = exec_result - .exit_code() - .unwrap_or_else(|error| panic!("failed to read exec exit code: {error}")) - .unwrap_or(0) as i32; - let stdout = exec_result - .stdout_to_vec() - .unwrap_or_else(|error| panic!("failed to read exec stdout: {error}")); - let stderr = exec_result - .stderr_to_vec() - .unwrap_or_else(|error| panic!("failed to read exec stderr: {error}")); - let stderr = String::from_utf8_lossy(&stderr).into_owned(); - let (status, stderr) = extract_exit_code(&stderr).unwrap_or((fallback_status, stderr)); - - CommandResult { - status, - stdout: String::from_utf8_lossy(&stdout).into_owned(), - stderr, - } - } - - pub fn exec_success(&self, command: &str) -> CommandResult { - let result = self.exec(command); - result.assert_success(&format!("testcontainer exec `{command}`")); - result - } -} - -pub fn integration_tests_dir() -> PathBuf { - PathBuf::from(env!("CARGO_MANIFEST_DIR")) -} - -pub fn mcp_manifest_dir() -> PathBuf { - integration_tests_dir() - .parent() - .expect("integration test crate should live under mcp/") - .to_path_buf() -} - -pub fn workspace_root() -> PathBuf { - mcp_manifest_dir() - .parent() - .expect("mcp crate should live under the workspace root") - .to_path_buf() -} - -pub fn integration_tmp_root() -> PathBuf { - integration_tests_dir().join(".tmp") -} - -pub fn run_mcp_with_env(args: &[&str], cwd: &Path, envs: &[(&str, &str)]) -> CommandResult { - let mut command = Command::new("cargo"); - command - .args(["run", "-p", "tnmsm", "--bin", "tnmsm", "--"]) - .args(args) - .current_dir(cwd); - for (key, value) in envs { - command.env(key, value); - } - - command_output(&mut command, "cargo run -p tnmsm --bin tnmsm") -} - -pub fn run_program(program: &str, args: &[&str], cwd: &Path) -> CommandResult { - let mut command = Command::new(program); - command.args(args).current_dir(cwd); - - command_output(&mut command, program) -} - -pub fn current_package_version() -> &'static str { - env!("CARGO_PKG_VERSION") -} - -pub fn is_linux_x64_host() -> bool { - std::env::consts::OS == "linux" && std::env::consts::ARCH == "x86_64" -} - -pub fn real_env_test_skip_reason() -> Option { - REAL_ENV_SKIP_REASON - .get_or_init(compute_real_env_skip_reason) - .clone() -} - -pub fn pnpm_version() -> &'static str { - PNPM_VERSION.get_or_init(|| { - let package_json_path = workspace_root().join("package.json"); - let raw = fs::read_to_string(&package_json_path) - .unwrap_or_else(|error| panic!("failed to read {}: {error}", package_json_path.display())); - let parsed: serde_json::Value = serde_json::from_str(&raw) - .unwrap_or_else(|error| panic!("failed to parse {}: {error}", package_json_path.display())); - let package_manager = parsed - .get("packageManager") - .and_then(|value| value.as_str()) - .unwrap_or("pnpm@latest"); - - package_manager - .rsplit_once('@') - .map(|(_, version)| version.to_string()) - .unwrap_or_else(|| "latest".to_string()) - }) -} - -pub fn ensure_release_binary() { - RELEASE_BINARY_BUILT.get_or_init(|| { - let result = run_program( - "cargo", - &["build", "--release", "-p", "tnmsm"], - &workspace_root(), - ); - result.assert_success("cargo build --release -p tnmsm"); - }); - - let binary = release_binary_path(); - assert!( - binary.is_file(), - "missing release binary at {}", - binary.display() - ); -} - -pub fn release_binary_path() -> PathBuf { - let binary_name = if cfg!(windows) { "tnmsm.exe" } else { "tnmsm" }; - workspace_root() - .join("target") - .join("release") - .join(binary_name) -} - -pub fn create_staged_package_root() -> StagedPackageRoot { - let temp_dir = TestDir::new("tnmsm-packaging"); - let package_root = temp_dir.path().join("mcp"); - - copy_file( - &mcp_manifest_dir().join("package.json"), - &package_root.join("package.json"), - ); - copy_dir_all(&mcp_manifest_dir().join("bin"), &package_root.join("bin")); - copy_file( - &mcp_manifest_dir() - .join("npm") - .join("linux-x64-gnu") - .join("package.json"), - &package_root - .join("npm") - .join("linux-x64-gnu") - .join("package.json"), - ); - - let linux_binary = package_root - .join("npm") - .join("linux-x64-gnu") - .join("bin") - .join("tnmsm"); - - StagedPackageRoot { - _temp_dir: temp_dir, - package_root, - linux_binary, - } -} - -pub fn pack_mcp_artifacts() -> PackedArtifacts { - ensure_release_binary(); - - let temp_dir = TestDir::new("tnmsm-packed-artifacts"); - let staged = create_staged_package_root(); - let package_root = staged.package_root.to_string_lossy().into_owned(); - let workspace_root_dir = workspace_root().to_string_lossy().into_owned(); - - let assemble = run_mcp_with_env( - &["assemble-npm", "--profile", "release"], - &workspace_root(), - &[ - ("TNMSM_NPM_PACKAGE_ROOT", package_root.as_str()), - ("TNMSM_WORKSPACE_ROOT", workspace_root_dir.as_str()), - ], - ); - assemble.assert_success("tnmsm assemble-npm for staged package root"); - - let linux_tarball = pack_package( - &staged.package_root.join("npm").join("linux-x64-gnu"), - temp_dir.path(), - "linux-x64-gnu", - ); - rewrite_main_package_json( - &staged.package_root.join("package.json"), - "file:/artifacts/linux-x64-gnu.tgz", - ); - let mcp_tarball = pack_package(&staged.package_root, temp_dir.path(), "mcp"); - - PackedArtifacts { - _temp_dir: temp_dir, - mcp_tarball, - linux_tarball, - } -} - -pub fn install_packaged_mcp_container() -> TestContainer { - let artifacts = pack_mcp_artifacts(); - let container = TestContainer::start(&artifacts); - let install_command = format!( - "corepack enable && corepack prepare pnpm@{} --activate && pnpm add -g {}", - quote_shell(pnpm_version()), - quote_shell("/artifacts/mcp.tgz") - ); - container.exec_success(&install_command); - container -} - -pub fn quote_shell(value: &str) -> String { - format!("'{}'", value.replace('\'', "'\"'\"'")) -} - -fn compute_real_env_skip_reason() -> Option { - if !is_linux_x64_host() { - return Some("unsupported host platform; real-env tests only run on linux x86_64".to_string()); - } - - let result = run_program( - "docker", - &["info", "--format", "{{.ServerVersion}}"], - &workspace_root(), - ); - if result.status == 0 { - return None; - } - - let detail = trim_output(&result.stderr) - .or_else(|| trim_output(&result.stdout)) - .unwrap_or_else(|| "docker daemon is unavailable".to_string()); - Some(format!("docker unavailable: {detail}")) -} - -fn pack_package(package_dir: &Path, target_root: &Path, name: &str) -> PathBuf { - let pack_destination = target_root.join(name); - fs::create_dir_all(&pack_destination).unwrap_or_else(|error| { - panic!( - "failed to create pack destination {}: {error}", - pack_destination.display() - ) - }); - - let package_dir = package_dir.to_string_lossy().into_owned(); - let pack_destination = pack_destination.to_string_lossy().into_owned(); - let result = run_program( - "pnpm", - &[ - "-C", - &package_dir, - "pack", - "--pack-destination", - &pack_destination, - ], - &workspace_root(), - ); - result.assert_success(&format!("pnpm pack for {}", package_dir)); - - let mut tarballs = fs::read_dir(&pack_destination) - .unwrap_or_else(|error| panic!("failed to read {}: {error}", pack_destination)) - .filter_map(|entry| entry.ok()) - .map(|entry| entry.path()) - .filter(|path| path.extension().and_then(OsStr::to_str) == Some("tgz")) - .collect::>(); - - tarballs.sort(); - assert!( - tarballs.len() == 1, - "expected exactly one tarball in {}, found {}", - pack_destination, - tarballs.len() - ); - - tarballs.remove(0) -} - -fn rewrite_main_package_json(path: &Path, platform_dependency: &str) { - let raw = fs::read_to_string(path) - .unwrap_or_else(|error| panic!("failed to read {}: {error}", path.display())); - let mut parsed: serde_json::Value = serde_json::from_str(&raw) - .unwrap_or_else(|error| panic!("failed to parse {}: {error}", path.display())); - - let object = parsed.as_object_mut().unwrap_or_else(|| { - panic!( - "expected top-level package.json object at {}", - path.display() - ) - }); - object.insert( - "optionalDependencies".to_string(), - serde_json::json!({ - "@truenine/memory-sync-mcp-linux-x64-gnu": platform_dependency - }), - ); - - fs::write( - path, - serde_json::to_string_pretty(&parsed) - .unwrap_or_else(|error| panic!("failed to serialize {}: {error}", path.display())), - ) - .unwrap_or_else(|error| panic!("failed to write {}: {error}", path.display())); -} - -fn copy_file(source: &Path, destination: &Path) { - if let Some(parent) = destination.parent() { - fs::create_dir_all(parent) - .unwrap_or_else(|error| panic!("failed to create {}: {error}", parent.display())); - } - - fs::copy(source, destination).unwrap_or_else(|error| { - panic!( - "failed to copy {} to {}: {error}", - source.display(), - destination.display() - ) - }); -} - -fn copy_dir_all(source: &Path, destination: &Path) { - fs::create_dir_all(destination) - .unwrap_or_else(|error| panic!("failed to create {}: {error}", destination.display())); - - for entry in fs::read_dir(source) - .unwrap_or_else(|error| panic!("failed to read {}: {error}", source.display())) - { - let entry = - entry.unwrap_or_else(|error| panic!("failed to read entry in {}: {error}", source.display())); - let file_type = entry.file_type().unwrap_or_else(|error| { - panic!( - "failed to read file type for {}: {error}", - entry.path().display() - ) - }); - let destination_path = destination.join(entry.file_name()); - - if file_type.is_dir() { - copy_dir_all(&entry.path(), &destination_path); - } else { - copy_file(&entry.path(), &destination_path); - } - } -} - -fn command_output(command: &mut Command, label: &str) -> CommandResult { - let output = command - .output() - .unwrap_or_else(|error| panic!("failed to run {label}: {error}")); - decode_output(output) -} - -fn decode_output(output: Output) -> CommandResult { - CommandResult { - status: output.status.code().unwrap_or(1), - stdout: String::from_utf8_lossy(&output.stdout).into_owned(), - stderr: String::from_utf8_lossy(&output.stderr).into_owned(), - } -} - -fn shell_script(command: &str) -> String { - [ - "set +e", - &format!("export HOME={}", quote_shell("/root")), - "export PNPM_HOME=/pnpm", - "export PATH=\"$PNPM_HOME:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"", - "mkdir -p \"$PNPM_HOME\" /artifacts", - "cd /", - command, - "status=$?", - &format!("printf '{}%s\\n' \"$status\" >&2", EXIT_MARKER), - "exit 0", - ] - .join("\n") -} - -fn extract_exit_code(stderr: &str) -> Option<(i32, String)> { - let mut lines = stderr.lines().map(str::to_string).collect::>(); - let marker_index = lines - .iter() - .rposition(|line| line.starts_with(EXIT_MARKER))?; - let marker = lines.remove(marker_index); - let exit_code = marker[EXIT_MARKER.len()..].parse::().ok()?; - let cleaned = if lines.is_empty() { - String::new() - } else { - let mut joined = lines.join("\n"); - joined.push('\n'); - joined - }; - - Some((exit_code, cleaned)) -} - -fn trim_output(output: &str) -> Option { - let trimmed = output.trim(); - (!trimmed.is_empty()).then(|| trimmed.to_string()) -} diff --git a/mcp/integrate-tests/tests/packaging_smoke.rs b/mcp/integrate-tests/tests/packaging_smoke.rs deleted file mode 100644 index bad8059b..00000000 --- a/mcp/integrate-tests/tests/packaging_smoke.rs +++ /dev/null @@ -1,80 +0,0 @@ -use std::fs; - -#[cfg(unix)] -use std::os::unix::fs::PermissionsExt; - -use tnmsm_integrate_tests::{ - create_staged_package_root, install_packaged_mcp_container, real_env_test_skip_reason, - run_mcp_with_env, workspace_root, -}; - -#[test] -fn packaging_smoke_covers_release_binary_and_global_install() { - if let Some(reason) = real_env_test_skip_reason() { - eprintln!("skipping packaging smoke: {reason}"); - return; - } - - let staged = create_staged_package_root(); - let package_root = staged.package_root.to_string_lossy().into_owned(); - let workspace_root_dir = workspace_root().to_string_lossy().into_owned(); - - let assemble = run_mcp_with_env( - &["assemble-npm", "--profile", "release"], - &workspace_root(), - &[ - ("TNMSM_NPM_PACKAGE_ROOT", package_root.as_str()), - ("TNMSM_WORKSPACE_ROOT", workspace_root_dir.as_str()), - ], - ); - assemble.assert_success("tnmsm assemble-npm --profile release"); - - assert!( - staged.linux_binary.is_file(), - "expected hydrated linux binary at {}", - staged.linux_binary.display() - ); - - #[cfg(unix)] - { - let mode = fs::metadata(&staged.linux_binary) - .unwrap_or_else(|error| panic!("failed to stat {}: {error}", staged.linux_binary.display())) - .permissions() - .mode(); - assert!( - mode & 0o111 != 0, - "expected {} to be executable, mode was {:o}", - staged.linux_binary.display(), - mode - ); - } - - let container = install_packaged_mcp_container(); - - let initialize = container - .exec(r#"printf '%s\n' '{"jsonrpc":"2.0","id":1,"method":"initialize","params":{}}' | tnmsm"#); - initialize.assert_success("global tnmsm initialize"); - for expected in [ - "\"jsonrpc\":\"2.0\"", - "\"protocolVersion\":\"2024-11-05\"", - "\"name\":\"@truenine/memory-sync-mcp\"", - ] { - assert!( - initialize.stdout.contains(expected), - "initialize output should include `{expected}`.\nstdout:\n{}", - initialize.stdout - ); - } - - container.exec_success( - r#" -MAIN_PACKAGE_JSON="$(find -L /pnpm/global -path '*/@truenine/memory-sync-mcp/package.json' -print -quit)" -PLATFORM_PACKAGE_JSON="$(find -L /pnpm/global -path '*/@truenine/memory-sync-mcp-linux-x64-gnu/package.json' -print -quit)" -test -n "$MAIN_PACKAGE_JSON" -test -n "$PLATFORM_PACKAGE_JSON" -test -f "$(dirname "$MAIN_PACKAGE_JSON")/bin/tnmsm.js" -test -x "$(dirname "$PLATFORM_PACKAGE_JSON")/bin/tnmsm" -test -x "$(command -v tnmsm)" -"#, - ); -} diff --git a/xtask/src/main.rs b/xtask/src/main.rs index 03af4b60..e06c093a 100644 --- a/xtask/src/main.rs +++ b/xtask/src/main.rs @@ -159,11 +159,7 @@ fn main() -> Result<(), String> { "--exclude", "tnmsg", "--exclude", - "tnmsc-integrate-tests", - "--exclude", "tnmsc-local-tests", - "--exclude", - "tnmsm-integrate-tests", "--lib", "--bins", "--tests", @@ -234,11 +230,7 @@ fn main() -> Result<(), String> { "--exclude", "tnmsg", "--exclude", - "tnmsc-integrate-tests", - "--exclude", "tnmsc-local-tests", - "--exclude", - "tnmsm-integrate-tests", "--lib", "--bins", "--tests", From 8cfe571745a471c7825291f06810c378bf489db3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Thu, 30 Apr 2026 22:26:06 +0800 Subject: [PATCH 33/45] docs(technical-details): add testing strategy and cross-platform design page - Document removal of integration tests due to infrastructure complexity - Position local-tests as the economical and realistic testing choice - State CLI/SDK must be designed cross-platform, not OS-specific - Update technical-details index and _meta.ts navigation --- doc/content/technical-details/_meta.ts | 3 +- doc/content/technical-details/index.mdx | 2 + .../technical-details/testing-strategy.mdx | 49 +++++++++++++++++++ 3 files changed, 53 insertions(+), 1 deletion(-) create mode 100644 doc/content/technical-details/testing-strategy.mdx diff --git a/doc/content/technical-details/_meta.ts b/doc/content/technical-details/_meta.ts index ea8cb52a..d01834e1 100644 --- a/doc/content/technical-details/_meta.ts +++ b/doc/content/technical-details/_meta.ts @@ -9,5 +9,6 @@ export default { 'commands': '命令', 'subagents': '子代理', 'rules': '规则', - 'libraries': '基础库' + 'libraries': '基础库', + 'testing-strategy': '测试策略与跨平台设计' } diff --git a/doc/content/technical-details/index.mdx b/doc/content/technical-details/index.mdx index 1e22774a..4c3de13b 100644 --- a/doc/content/technical-details/index.mdx +++ b/doc/content/technical-details/index.mdx @@ -15,6 +15,7 @@ status: stable 2. 为什么仓库已经明确转向 Rust-first / NAPI-first。 3. 为什么 sync 必须显式建模输出目标、输出范围和清理边界。 4. 为什么不同类型的输入资产不应该塌缩成一个巨大的 prompt。 +5. 为什么集成测试已被移除,以及本地测试和跨平台设计如何成为当前策略。 ## 推荐阅读 @@ -27,3 +28,4 @@ status: stable - [子代理](/docs/technical-details/subagents) - [规则](/docs/technical-details/rules) - [基础库](/docs/technical-details/libraries) +- [测试策略与跨平台设计](/docs/technical-details/testing-strategy) diff --git a/doc/content/technical-details/testing-strategy.mdx b/doc/content/technical-details/testing-strategy.mdx new file mode 100644 index 00000000..9851e6eb --- /dev/null +++ b/doc/content/technical-details/testing-strategy.mdx @@ -0,0 +1,49 @@ +--- +title: 测试策略与跨平台设计 +description: 说明为什么集成测试已被移除,本地测试成为主要手段,以及 CLI/SDK 的跨平台设计约束。 +sidebarTitle: 测试策略与跨平台设计 +status: stable +--- + +# 测试策略与跨平台设计 + +## 集成测试的移除 + +仓库曾经使用基于 Docker / testcontainers 的集成测试(`cli/integrate-tests`、`mcp/integrate-tests`)来验证 CLI 和 MCP 的打包与命令契约。这些测试已被移除,原因如下: + +- **基础设施过于复杂**:维护 Docker 镜像、容器生命周期和跨平台二进制打包的测试基础设施成本过高,且容易因为环境差异产生不稳定的结果。 +- **测试反馈慢**:容器启动、镜像构建和清理过程显著拖慢 CI 反馈循环,不利于快速迭代。 +- **维护负担重**:testcontainers 依赖、平台特定二进制处理(Linux musl/Alpine 等)和容器内路径映射需要持续维护,与核心开发节奏脱节。 + +## 本地测试作为经济且现实的选择 + +当前仓库以 **本地裸机测试(local-tests)** 为主要测试手段: + +- **运行方式**:直接在宿主环境执行编译后的二进制文件,无需容器层。 +- **覆盖范围**:验证 `install`、`clean`、`dry-run`、各输出插件(`claude_code`、`opencode`、`codex` 等)以及日志可观测性的核心路径。 +- **经济优势**:零额外基础设施依赖,CI 和本地开发机均可秒级运行。 +- **现实约束**:本地测试默认复用开发者已有的 `~/.aindex` 配置和真实项目结构,因此测试用例设计为“可恢复”——通过临时修改全局配置并在测试结束后还原,避免破坏用户环境。 + +运行本地测试: + +```bash +cargo test -p tnmsc-local-tests +``` + +> 注意:`cargo test --workspace` 不会自动运行 `local-tests`,因为后者需要宿主环境具备真实配置。请在确认 `~/.aindex/.tnmsc.json` 已配置后单独执行。 + +## CLI/SDK 的跨平台设计约束 + +CLI 和 SDK 的设计必须保持 **跨平台中立**,不能绑定到特定操作系统: + +- **路径处理**:所有路径逻辑使用 Rust 标准库的 `Path`/`PathBuf`,禁止硬编码 Windows 或 Unix 风格的分隔符。WSL 路径同步由独立配置项控制,而非代码级假设。 +- **配置解析**:`~` 展开和目录解析在运行时完成,不依赖 shell 行为。 +- **输出插件**:各插件(`claude_code`、`opencode`、`codex`、`trae`)生成目标文件时,只使用相对路径或平台无关的标识,避免在产物中嵌入 OS 特定路径。 +- **CI 与发布**:构建和测试流水线在 Windows、macOS 和 Linux 上均运行,确保没有平台特有的回归。 + +## 相关页面 + +- [架构边界](/docs/technical-details/architecture) +- [同步流水线](/docs/technical-details/pipeline) +- [CLI / 安装](/docs/cli/install) +- [SDK / 架构](/docs/sdk/architecture) From b08636db0ec4cd29debfb328105d91dd720be203 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Sat, 2 May 2026 00:02:52 +0800 Subject: [PATCH 34/45] feat: enhance skill management with category support - Introduced category_name field in SkillPrompt struct to allow categorization of skills. - Added functions to resolve skill directory names and build skill source identifiers based on category. - Updated output plan collection to include categorized skills and their supporting files. - Implemented logic to handle nested skill directories and ensure proper file structure. - Enhanced prompt service to parse and build identifiers for categorized skills. - Added tests to validate the new functionality, ensuring correct handling of categorized and legacy skills. --- cli/local-tests/src/lib.rs | 190 +++++- cli/local-tests/tests/agents_md_smoke.rs | 19 +- cli/local-tests/tests/claude_smoke.rs | 163 ++++++ cli/local-tests/tests/clean_blackbox.rs | 8 + cli/local-tests/tests/codex_smoke.rs | 335 ++++++++--- cli/local-tests/tests/command_contract.rs | 4 + cli/local-tests/tests/dry_run_smoke.rs | 2 + cli/local-tests/tests/install_smoke.rs | 11 + cli/local-tests/tests/logging_clean.rs | 3 + cli/local-tests/tests/logging_dry_run.rs | 3 + .../tests/logging_error_feedback.rs | 4 + .../tests/logging_install_observability.rs | 4 + cli/local-tests/tests/logging_levels.rs | 8 + .../tests/opencode_agent_mode_validation.rs | 3 + cli/local-tests/tests/opencode_smoke.rs | 185 +++++- cli/local-tests/tests/rules_source_smoke.rs | 6 + cli/local-tests/tests/trae_smoke.rs | 9 + .../output_plans/claude_code_output_plan.rs | 186 +++++- .../domain/output_plans/codex_output_plan.rs | 184 +++++- .../domain/output_plans/droid_output_plan.rs | 61 ++ .../output_plans/opencode_output_plan.rs | 186 +++++- sdk/src/domain/plugin_shared.rs | 2 + sdk/src/repositories/skill.rs | 547 +++++++++++++++++- sdk/src/services/prompt_service.rs | 204 ++++++- 24 files changed, 2162 insertions(+), 165 deletions(-) diff --git a/cli/local-tests/src/lib.rs b/cli/local-tests/src/lib.rs index a23afe6f..da63e3c7 100644 --- a/cli/local-tests/src/lib.rs +++ b/cli/local-tests/src/lib.rs @@ -1,16 +1,28 @@ +//! Black-box test infrastructure for the tnmsc CLI. +//! +//! Provides `LocalTestRunner` — a test harness that invokes the real compiled +//! `tnmsc` binary against the actual project directory (`~/workspace/memory-sync/`). +//! Uses cross-process file locking and in-process mutex to serialise access to +//! the shared project, ensuring test isolation. +//! +//! All tests follow the pattern: clean → install → verify → clean. +//! The binary is auto-built from source if not found or stale. + #![allow(dead_code)] use std::fs; use std::path::{Path, PathBuf}; use std::process::{Command, Output}; use std::sync::{Mutex, OnceLock}; -use std::time::Duration; +use std::time::{Duration, SystemTime}; static BINARY_BUILT: OnceLock<()> = OnceLock::new(); static PROJECT_LOCK: OnceLock> = OnceLock::new(); +/// The set of subcommands every tnmsc binary must support (help contract). pub const EXPECTED_SUBCOMMANDS: &[&str] = &["install", "dry-run", "clean", "version", "help"]; +/// Result of a tnmsc CLI invocation: exit code + captured stdout/stderr. pub struct CommandResult { pub status: i32, pub stdout: String, @@ -18,6 +30,7 @@ pub struct CommandResult { } impl CommandResult { + /// Assert the command exited with status 0, panicking with diagnostics if not. pub fn assert_success(&self, context: &str) { assert_eq!( self.status, 0, @@ -26,6 +39,7 @@ impl CommandResult { ); } + /// Assert the command exited with a non-zero status, panicking if it succeeded. pub fn assert_failure(&self, context: &str) { assert_ne!( self.status, 0, @@ -35,6 +49,9 @@ impl CommandResult { } } +/// Test harness for invoking tnmsc against the real project. +/// Acquires both in-process and cross-process locks on construction, +/// so only one test can mutate the project at a time. pub struct LocalTestRunner { binary: PathBuf, cwd: PathBuf, @@ -68,6 +85,7 @@ impl LocalTestRunner { } } + /// Create a runner targeting a specific directory instead of the default project. pub fn with_cwd(cwd: impl AsRef) -> Self { ensure_binary(); let file_lock = acquire_cross_process_lock(); @@ -89,6 +107,7 @@ impl LocalTestRunner { } } + /// The project directory this runner operates on. pub fn cwd(&self) -> &Path { &self.cwd } @@ -120,6 +139,7 @@ impl LocalTestRunner { .unwrap_or_else(|| self.cwd.clone()) } + /// Run `tnmsc ` in the project directory and capture output. pub fn run(&self, args: &[&str]) -> CommandResult { let mut cmd = Command::new(&self.binary); cmd.args(args).current_dir(&self.cwd); @@ -148,12 +168,14 @@ impl LocalTestRunner { command_output(&mut cmd, &format!("tnmsc {}", args.join(" "))) } + /// Run `tnmsc ` and assert it exits 0, returning the result. pub fn run_success(&self, args: &[&str]) -> CommandResult { let result = self.run(args); result.assert_success(&format!("tnmsc {}", args.join(" "))); result } + /// Assert that a `.tnmsc.json` config file exists in cwd or ~/.aindex/. pub fn assert_config_exists(&self) { let config_candidates = [ self.cwd.join(".tnmsc.json"), @@ -171,6 +193,7 @@ impl LocalTestRunner { ); } + /// Assert that an `aindex/` directory exists in cwd or ~/.aindex/. pub fn assert_aindex_exists(&self) { let aindex_candidates = [self.cwd.join("aindex"), home_dir().join(".aindex")]; let found = aindex_candidates.iter().any(|p| p.is_dir()); @@ -185,6 +208,7 @@ impl LocalTestRunner { ); } + /// Assert both config and aindex exist — the project is ready for testing. pub fn assert_project_ready(&self) { self.assert_config_exists(); self.assert_aindex_exists(); @@ -394,6 +418,9 @@ impl Default for LocalTestRunner { // other when running local tests on the shared project directory. // --------------------------------------------------------------------------- +/// A cross-process file lock that prevents concurrent test binaries from +/// mutating the shared project directory simultaneously. +/// Released automatically on drop. pub struct CrossProcessLock(Option); impl Drop for CrossProcessLock { @@ -428,10 +455,12 @@ fn acquire_cross_process_lock() -> CrossProcessLock { } } +/// Ensure the compiled tnmsc binary exists, building it from source if necessary. +/// Tracks source file modification times to avoid unnecessary rebuilds. pub fn ensure_binary() { let binary = binary_path(); - if binary.is_file() { + if binary.is_file() && !binary_needs_rebuild(&binary) { eprintln!( "[tnmsc-local-tests] using existing binary: {}", binary.display() @@ -460,6 +489,55 @@ pub fn ensure_binary() { assert!(binary.is_file(), "missing binary at {}", binary.display()); } +fn binary_needs_rebuild(binary: &Path) -> bool { + let Some(binary_modified) = file_modified_time(binary) else { + return true; + }; + + tracked_cli_input_paths() + .into_iter() + .filter_map(|path| newest_modified_time(&path)) + .any(|input_modified| input_modified > binary_modified) +} + +fn tracked_cli_input_paths() -> Vec { + let root = workspace_root(); + vec![ + root.join("Cargo.toml"), + root.join("Cargo.lock"), + root.join("cli").join("Cargo.toml"), + root.join("cli").join("src"), + root.join("sdk").join("Cargo.toml"), + root.join("sdk").join("src"), + ] +} + +fn newest_modified_time(path: &Path) -> Option { + if path.is_file() { + return file_modified_time(path); + } + if !path.is_dir() { + return None; + } + + let mut newest = file_modified_time(path); + let entries = fs::read_dir(path).ok()?; + for entry in entries.flatten() { + if let Some(child_modified) = newest_modified_time(&entry.path()) { + newest = Some(match newest { + Some(current) if current >= child_modified => current, + _ => child_modified, + }); + } + } + newest +} + +fn file_modified_time(path: &Path) -> Option { + fs::metadata(path).ok()?.modified().ok() +} + +/// Resolve the expected path to the compiled tnmsc debug binary. pub fn binary_path() -> PathBuf { let binary_name = if cfg!(windows) { "tnmsc.exe" } else { "tnmsc" }; workspace_root() @@ -468,6 +546,7 @@ pub fn binary_path() -> PathBuf { .join(binary_name) } +/// Resolve the workspace root (the memory-sync Cargo workspace directory). pub fn workspace_root() -> PathBuf { PathBuf::from(env!("CARGO_MANIFEST_DIR")) .parent() @@ -477,10 +556,12 @@ pub fn workspace_root() -> PathBuf { .to_path_buf() } +/// Resolve the user's home directory, panicking if unavailable. pub fn home_dir() -> PathBuf { dirs::home_dir().expect("should have home directory") } +/// Return the workspace package version string from Cargo.toml. pub fn current_package_version() -> &'static str { env!("CARGO_PKG_VERSION") } @@ -536,3 +617,108 @@ fn decode_output(output: Output) -> CommandResult { stderr: String::from_utf8_lossy(&output.stderr).into_owned(), } } + +#[cfg(test)] +mod tests { + use super::{binary_needs_rebuild, newest_modified_time}; + use std::fs; + use std::path::{Path, PathBuf}; + use std::time::{Duration, SystemTime, UNIX_EPOCH}; + + #[test] + fn binary_requires_rebuild_when_missing() { + let root = make_temp_dir("missing-binary"); + let binary = root.join("tnmsc"); + + assert!(binary_needs_rebuild(&binary)); + } + + #[test] + fn binary_requires_rebuild_when_source_is_newer() { + let root = make_temp_dir("stale-binary"); + let binary = root.join("target").join("debug").join("tnmsc"); + let source = root.join("sdk").join("src").join("lib.rs"); + + write_file(&source, "old source"); + sleep_for_mtime_tick(); + write_file(&binary, "older binary snapshot"); + sleep_for_mtime_tick(); + write_file(&source, "new source"); + + assert!(is_binary_stale_for_paths(&binary, &[source])); + } + + #[test] + fn binary_stays_fresh_when_binary_is_newer_than_inputs() { + let root = make_temp_dir("fresh-binary"); + let binary = root.join("target").join("debug").join("tnmsc"); + let source = root.join("sdk").join("src").join("lib.rs"); + + write_file(&source, "old source"); + sleep_for_mtime_tick(); + write_file(&binary, "new binary"); + + assert!(!is_binary_stale_for_paths(&binary, &[source])); + } + + #[test] + fn newest_modified_time_walks_nested_directories() { + let root = make_temp_dir("recursive-mtime"); + let early = root.join("sdk").join("src").join("early.rs"); + let late = root.join("sdk").join("src").join("nested").join("late.rs"); + + write_file(&early, "first"); + sleep_for_mtime_tick(); + write_file(&late, "second"); + + let root_modified = newest_modified_time(&root.join("sdk")).unwrap(); + let late_modified = fs::metadata(&late).unwrap().modified().unwrap(); + + assert_eq!(system_time_key(root_modified), system_time_key(late_modified)); + } + + fn is_binary_stale_for_paths(binary: &Path, inputs: &[PathBuf]) -> bool { + let Some(binary_modified) = fs::metadata(binary).ok().and_then(|meta| meta.modified().ok()) + else { + return true; + }; + + inputs + .iter() + .filter_map(|path| newest_modified_time(path)) + .any(|input_modified| input_modified > binary_modified) + } + + fn make_temp_dir(label: &str) -> PathBuf { + let unique = format!( + "tnmsc-local-tests-{}-{}-{}", + label, + std::process::id(), + SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap_or_default() + .as_nanos() + ); + let path = std::env::temp_dir().join(unique); + fs::create_dir_all(&path).unwrap(); + path + } + + fn write_file(path: &Path, content: &str) { + if let Some(parent) = path.parent() { + fs::create_dir_all(parent).unwrap(); + } + fs::write(path, content).unwrap(); + } + + fn sleep_for_mtime_tick() { + std::thread::sleep(Duration::from_millis(25)); + } + + fn system_time_key(time: SystemTime) -> u128 { + time + .duration_since(UNIX_EPOCH) + .unwrap_or_default() + .as_nanos() + } +} diff --git a/cli/local-tests/tests/agents_md_smoke.rs b/cli/local-tests/tests/agents_md_smoke.rs index 9f91fc88..cd8a0028 100644 --- a/cli/local-tests/tests/agents_md_smoke.rs +++ b/cli/local-tests/tests/agents_md_smoke.rs @@ -65,6 +65,8 @@ impl Drop for GlobalConfigGuard { } } +/// Verify that install generates both the project-root AGENTS.md and a child +/// .github/AGENTS.md with non-empty content. #[test] fn local_agents_md_install_generates_project_agents_md() { let runner = LocalTestRunner::new(); @@ -100,6 +102,8 @@ fn local_agents_md_install_generates_project_agents_md() { ); } +/// Verify that the generated AGENTS.md content exactly matches the aindex +/// source `app/memory-sync/agt.mdx`. #[test] fn local_agents_md_content_matches_aindex_source() { let runner = LocalTestRunner::new(); @@ -126,6 +130,8 @@ fn local_agents_md_content_matches_aindex_source() { ); } +/// Verify that the generated .github/AGENTS.md content exactly matches the aindex +/// source `app/memory-sync/.github/agt.mdx`. #[test] fn local_agents_md_child_content_matches_aindex_source() { let runner = LocalTestRunner::new(); @@ -152,6 +158,8 @@ fn local_agents_md_child_content_matches_aindex_source() { ); } +/// Verify that `tnmsc clean` removes both the project-root AGENTS.md and the child +/// .github/AGENTS.md. #[test] fn local_agents_md_clean_removes_files() { let runner = LocalTestRunner::new(); @@ -182,6 +190,8 @@ fn local_agents_md_clean_removes_files() { ); } +/// Verify that when `plugins.agentsMd` is set to `false`, install does NOT generate +/// AGENTS.md files. #[test] fn local_agents_md_disabled_by_config() { let runner = LocalTestRunner::new(); @@ -234,11 +244,10 @@ fn local_agents_md_disabled_by_config() { ); } -/// 回归测试:clean 必须始终清理所有插件生成的文件,即使该插件当前已被禁用。 -/// -/// 设计原因:用户可能在禁用某个插件之前已经运行过 install,导致该插件生成的文件 -/// 仍然残留在项目中。如果 clean 也跟随插件开关,则这些残留文件将永远无法被自动 -/// 清理。因此 clean 行为不受插件开关控制,install 行为才受插件开关控制。 +/// Regression guard: `tnmsc clean` must remove AGENTS.md files even when the agentsMd +/// plugin is currently disabled. Design rationale: if a user disabled the plugin after +/// previous installs, stale files must still be cleaned. Clean behavior is independent +/// of plugin switches; only install respects the plugin toggle. #[test] fn local_agents_md_clean_always_removes_files_even_when_disabled() { let runner = LocalTestRunner::new(); diff --git a/cli/local-tests/tests/claude_smoke.rs b/cli/local-tests/tests/claude_smoke.rs index 202b7c55..8d1196c3 100644 --- a/cli/local-tests/tests/claude_smoke.rs +++ b/cli/local-tests/tests/claude_smoke.rs @@ -9,6 +9,8 @@ use tnmsc_local_tests::LocalTestRunner; +/// Verify that install generates both the project-root CLAUDE.md and a child +/// .github/CLAUDE.md, both with non-empty content. #[test] fn local_claude_install_generates_project_claude_md() { let runner = LocalTestRunner::new(); @@ -44,6 +46,8 @@ fn local_claude_install_generates_project_claude_md() { ); } +/// Verify that the generated project CLAUDE.md content exactly matches the aindex +/// source file `app/memory-sync/agt.mdx`. Ensures no content drift. #[test] fn local_claude_project_content_matches_aindex_source() { let runner = LocalTestRunner::new(); @@ -70,6 +74,8 @@ fn local_claude_project_content_matches_aindex_source() { ); } +/// Verify that the generated .github/CLAUDE.md content exactly matches the aindex +/// source `app/memory-sync/.github/agt.mdx`. #[test] fn local_claude_child_content_matches_aindex_source() { let runner = LocalTestRunner::new(); @@ -96,6 +102,8 @@ fn local_claude_child_content_matches_aindex_source() { ); } +/// Verify that `tnmsc clean` removes ALL CLAUDE.md files recursively throughout +/// the project tree, not just the root one. #[test] fn local_claude_clean_removes_all_project_files() { let runner = LocalTestRunner::new(); @@ -163,6 +171,8 @@ fn local_claude_clean_removes_all_project_files() { ); } +/// Verify that the global ~/.claude/CLAUDE.md is generated (it persists independently +/// of project-level clean). #[test] fn local_claude_global_file_still_generated() { let runner = LocalTestRunner::new(); @@ -184,3 +194,156 @@ fn local_claude_global_file_still_generated() { .expect("global CLAUDE.md should be readable"); assert!(!content.is_empty(), "global CLAUDE.md should not be empty"); } + +/// Isolated regression test for categorized skills in Claude output. +/// Verifies that: +/// 1. `name` in SKILL.md matches the generated directory name +/// 2. child docs are compiled and emitted as `.md`, not `.mdx` +/// 3. clean removes the generated project tree +#[test] +fn regression_isolated_claude_skill_name_and_child_doc_extensions() { + let runner = LocalTestRunner::new(); + + let temp_root = std::env::temp_dir().join(format!( + "tnmsc-local-claude-reverse-{}-{}", + std::process::id(), + std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap_or_default() + .as_nanos() + )); + let temp_home = temp_root.join("home"); + let workspace_dir = temp_root.join("workspace"); + let aindex_dir = workspace_dir.join("aindex"); + let skill_dir = aindex_dir + .join("skills") + .join("dev-tools") + .join("reverse-engineering"); + + std::fs::create_dir_all(temp_home.join(".aindex")).unwrap(); + std::fs::create_dir_all(&aindex_dir).unwrap(); + std::fs::create_dir_all(&skill_dir).unwrap(); + + std::fs::write( + temp_home.join(".aindex").join(".tnmsc.json"), + serde_json::json!({ + "workspaceDir": workspace_dir.to_string_lossy(), + "plugins": { + "agentsMd": false, + "git": false, + "readme": false, + "vscode": false, + "zed": false, + "jetbrains": false, + "jetbrainsCodeStyle": false, + "cursor": false, + "droid": false, + "gemini": false, + "kiro": false, + "qoder": false, + "trae": false, + "traeCn": false, + "warp": false, + "windsurf": false, + "codex": false, + "claudeCode": true, + "opencode": false + } + }) + .to_string(), + ) + .unwrap(); + + std::fs::write( + aindex_dir.join("workspace.mdx"), + "---\ndescription: workspace\n---\nWorkspace prompt\n", + ) + .unwrap(); + std::fs::write( + aindex_dir.join("workspace.src.mdx"), + "---\ndescription: workspace\n---\nWorkspace prompt\n", + ) + .unwrap(); + + std::fs::write( + skill_dir.join("skill.src.mdx"), + "export default { name: 'reverse-engineering', description: 'Reverse engineering skill' }\n\n# Reverse\n", + ) + .unwrap(); + std::fs::write( + skill_dir.join("skill.mdx"), + "export default { name: 'reverse-engineering', description: 'Reverse engineering skill' }\n\n# Reverse\n", + ) + .unwrap(); + + for name in ["packet-capture", "reverse-tools"] { + std::fs::write( + skill_dir.join(format!("{name}.src.mdx")), + format!("---\ndescription: {name}\n---\n# {name}\n"), + ) + .unwrap(); + std::fs::write( + skill_dir.join(format!("{name}.mdx")), + format!("---\ndescription: {name}\n---\n# {name}\n"), + ) + .unwrap(); + } + + let temp_home_str = temp_home.to_string_lossy().into_owned(); + + let install = runner.run_at_with_env( + &workspace_dir, + &["install"], + &[("HOME", &temp_home_str)], + ); + install.assert_failure("isolated tnmsc install for claude should be blocked by protected root CLAUDE.md"); + assert!( + install.stderr.contains("Refusing to write protected path.") + || install.stderr.contains("CLAUDE.md: Refusing to write protected path."), + "expected protected-path failure for root CLAUDE.md, got stderr:\n{}", + install.stderr + ); + + let generated_skill_dir = workspace_dir + .join(".claude") + .join("skills") + .join("dev-tools-reverse-engineering"); + assert!( + generated_skill_dir.join("SKILL.md").is_file(), + "claude should generate SKILL.md for dev-tools-reverse-engineering" + ); + assert!( + generated_skill_dir.join("packet-capture.md").is_file(), + "claude should emit packet-capture child doc as .md" + ); + assert!( + generated_skill_dir.join("reverse-tools.md").is_file(), + "claude should emit reverse-tools child doc as .md" + ); + assert!( + !generated_skill_dir.join("packet-capture.mdx").exists(), + "claude must not emit packet-capture child doc as .mdx" + ); + assert!( + !generated_skill_dir.join("reverse-tools.mdx").exists(), + "claude must not emit reverse-tools child doc as .mdx" + ); + + let skill_content = std::fs::read_to_string(generated_skill_dir.join("SKILL.md")).unwrap(); + assert!( + skill_content.contains("name: dev-tools-reverse-engineering"), + "claude SKILL.md name field must match generated directory name" + ); + assert!( + skill_content.contains("skill: aindex/skills/dev-tools/reverse-engineering"), + "claude SKILL.md should keep the categorized source identifier" + ); + + let clean = runner.run_at_with_env(&workspace_dir, &["clean"], &[("HOME", &temp_home_str)]); + clean.assert_success("isolated tnmsc clean for claude"); + + assert!( + !workspace_dir.join(".claude").exists(), + "clean should remove the generated .claude tree" + ); +} diff --git a/cli/local-tests/tests/clean_blackbox.rs b/cli/local-tests/tests/clean_blackbox.rs index e6da7903..f83d5420 100644 --- a/cli/local-tests/tests/clean_blackbox.rs +++ b/cli/local-tests/tests/clean_blackbox.rs @@ -17,6 +17,7 @@ fn workspace_paths() -> (PathBuf, PathBuf, PathBuf, PathBuf) { ) } +/// Verify the basic clean lifecycle: install creates CLAUDE.md, clean removes it. #[test] fn local_clean_removes_project_claude_md() { let runner = LocalTestRunner::new(); @@ -44,6 +45,7 @@ fn local_clean_removes_project_claude_md() { ); } +/// Verify that `tnmsc clean --dry-run` does NOT delete files — it only previews what would be cleaned. #[test] fn local_clean_dry_run_does_not_remove_files() { let runner = LocalTestRunner::new(); @@ -68,6 +70,8 @@ fn local_clean_dry_run_does_not_remove_files() { ); } +/// Verify that running `tnmsc clean` inside ~/workspace/memory-sync/ only cleans +/// that project, not sibling projects like aindex or knowladge. #[test] fn local_clean_from_memory_sync_does_not_clean_other_projects() { let (home, memory_sync, aindex, knowladge) = workspace_paths(); @@ -124,6 +128,8 @@ fn local_clean_from_memory_sync_does_not_clean_other_projects() { ); } +/// Verify the reverse: running clean inside ~/workspace/aindex/ does not affect +/// memory-sync's generated files. #[test] fn local_clean_from_aindex_does_not_clean_memory_sync() { let (home, memory_sync, aindex, knowladge) = workspace_paths(); @@ -180,6 +186,8 @@ fn local_clean_from_aindex_does_not_clean_memory_sync() { ); } +/// Verify that running clean from ~/ (which is above all workspace projects) cleans +/// all projects under the workspace directory. #[test] fn local_clean_from_home_cleans_all_projects() { let (home, memory_sync, aindex, knowladge) = workspace_paths(); diff --git a/cli/local-tests/tests/codex_smoke.rs b/cli/local-tests/tests/codex_smoke.rs index 5a3e7da6..675c0180 100644 --- a/cli/local-tests/tests/codex_smoke.rs +++ b/cli/local-tests/tests/codex_smoke.rs @@ -22,6 +22,44 @@ fn assert_codex_plugin_enabled() { ); } +fn expected_installed_skill_names( + aindex_skills_dir: &std::path::Path, +) -> std::collections::HashSet { + let mut names = std::collections::HashSet::new(); + + for entry in std::fs::read_dir(aindex_skills_dir).unwrap().flatten() { + if !entry.file_type().map(|file_type| file_type.is_dir()).unwrap_or(false) { + continue; + } + + let first_level_dir = entry.path(); + let first_level_name = entry.file_name().to_string_lossy().to_string(); + let has_root_skill = first_level_dir.join("skill.mdx").is_file() + || first_level_dir.join("skill.src.mdx").is_file(); + + if has_root_skill { + names.insert(first_level_name); + continue; + } + + for nested_entry in std::fs::read_dir(&first_level_dir).unwrap().flatten() { + if !nested_entry + .file_type() + .map(|file_type| file_type.is_dir()) + .unwrap_or(false) + { + continue; + } + + let nested_name = nested_entry.file_name().to_string_lossy().to_string(); + names.insert(format!("{first_level_name}-{nested_name}")); + } + } + + names +} + +/// Verify that install generates the global ~/.codex/AGENTS.md with non-empty content. #[test] fn local_codex_install_generates_global_agents_md() { assert_codex_plugin_enabled(); @@ -49,6 +87,8 @@ fn local_codex_install_generates_global_agents_md() { ); } +/// Verify that the global ~/.codex/AGENTS.md content exactly matches the aindex +/// `global.mdx` source. #[test] fn local_codex_global_agents_md_matches_aindex_source() { assert_codex_plugin_enabled(); @@ -77,6 +117,7 @@ fn local_codex_global_agents_md_matches_aindex_source() { ); } +/// Verify that install creates the ~/.codex/prompts/ directory. #[test] fn local_codex_install_generates_global_prompts_dir() { assert_codex_plugin_enabled(); @@ -96,6 +137,8 @@ fn local_codex_install_generates_global_prompts_dir() { ); } +/// Verify that prompt files in ~/.codex/prompts/ are all .md files with correct format +/// (kebab-case fields like argument-hint, not camelCase argumentHint). #[test] fn local_codex_prompts_match_aindex_commands() { assert_codex_plugin_enabled(); @@ -154,6 +197,8 @@ fn local_codex_prompts_match_aindex_commands() { } } +/// Verify that codex prompt files do NOT contain a `command:` field (compatibility issue) +/// and that all YAML values are enclosed in double quotes. #[test] fn local_codex_prompts_no_command_field_and_quoted_values() { assert_codex_plugin_enabled(); @@ -236,6 +281,7 @@ fn local_codex_prompts_no_command_field_and_quoted_values() { } } +/// Verify that install creates the project-level .codex/ directory. #[test] fn local_codex_install_generates_project_codex_dir() { assert_codex_plugin_enabled(); @@ -255,6 +301,8 @@ fn local_codex_install_generates_project_codex_dir() { ); } +/// Verify that the project .codex/skills/ directory names exactly match the transformed +/// aindex/skills/ names (same count, same names). #[test] fn local_codex_project_skills_match_aindex_skills() { assert_codex_plugin_enabled(); @@ -278,42 +326,29 @@ fn local_codex_project_skills_match_aindex_skills() { .resolve_aindex_dir() .expect("aindex dir should exist"); let aindex_skills_dir = aindex_dir.join("skills"); - let aindex_skill_entries: Vec<_> = std::fs::read_dir(&aindex_skills_dir) - .unwrap() - .flatten() - .filter(|e| e.file_type().map(|ft| ft.is_dir()).unwrap_or(false)) - .collect(); - - // Count project codex skills let project_skills_dir = runner.cwd().join(".codex").join("skills"); - let project_skill_entries: Vec<_> = std::fs::read_dir(&project_skills_dir) + let expected_names = expected_installed_skill_names(&aindex_skills_dir); + let project_names: std::collections::HashSet = std::fs::read_dir(&project_skills_dir) .unwrap() .flatten() - .filter(|e| e.file_type().map(|ft| ft.is_dir()).unwrap_or(false)) + .filter(|entry| entry.file_type().map(|file_type| file_type.is_dir()).unwrap_or(false)) + .map(|entry| entry.file_name().to_string_lossy().to_string()) .collect(); assert_eq!( - aindex_skill_entries.len(), - project_skill_entries.len(), + expected_names.len(), + project_names.len(), "project .codex/skills should have same count as aindex/skills" ); - // Verify same directory names - let aindex_names: std::collections::HashSet = aindex_skill_entries - .iter() - .map(|e| e.file_name().to_string_lossy().to_string()) - .collect(); - let project_names: std::collections::HashSet = project_skill_entries - .iter() - .map(|e| e.file_name().to_string_lossy().to_string()) - .collect(); - assert_eq!( - aindex_names, project_names, - "project .codex/skills directory names should match aindex/skills" + expected_names, project_names, + "project .codex/skills directory names should match transformed aindex/skills names" ); } +/// Verify that global ~/.codex/agents/*.toml files are also present in the project +/// .codex/agents/ directory with matching filenames. #[test] fn local_codex_global_agents_copied_to_project() { assert_codex_plugin_enabled(); @@ -383,6 +418,8 @@ fn local_codex_global_agents_copied_to_project() { ); } +/// Verify that all files in the project .codex/agents/ directory are .toml files with +/// the expected `name` and `developer_instructions` fields. #[test] fn local_codex_project_agents_are_all_toml() { assert_codex_plugin_enabled(); @@ -437,6 +474,7 @@ fn local_codex_project_agents_are_all_toml() { } } +/// Verify that `tnmsc clean` removes the generated .codex/ directory. #[test] fn local_codex_clean_removes_files() { assert_codex_plugin_enabled(); @@ -461,6 +499,7 @@ fn local_codex_clean_removes_files() { ); } +/// Verify that `tnmsc dry-run` does NOT create the .codex/ directory. #[test] fn local_codex_dry_run_does_not_write() { assert_codex_plugin_enabled(); @@ -485,59 +524,215 @@ fn local_codex_dry_run_does_not_write() { ); } -/// Regression test: skill output directories should only contain SKILL.md. -/// -/// Prior bug: resources, child docs, and mcp config files were rendered inside -/// each skill directory (e.g. act/act/, references/*), creating incorrect -/// nested structures and duplicate content. -/// -/// Note: only checks plugins generated by `tnmsc install` (codex, opencode, -/// claude). droid (.factory/skills) is a separate plugin not emitted by -/// install; it has its own `skill_output_only_contains_skill_md` unit test. +/// Isolated regression test: install into a temp directory (not the real project) with +/// only codex+opencode enabled, using a minimal browser skill fixture. Verifies SKILL.md, +/// references/, templates/, and mcp.json are all generated, and that clean removes the +/// entire generated tree. #[test] -fn regression_skill_output_only_contains_skill_md() { - assert_codex_plugin_enabled(); - +fn regression_isolated_install_outputs_full_browser_skill_and_clean_removes_it() { let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); - let install = runner.install(); - install.assert_success("tnmsc install"); + let temp_root = std::env::temp_dir().join(format!( + "tnmsc-local-browser-skill-{}-{}", + std::process::id(), + std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap_or_default() + .as_nanos() + )); + let temp_home = temp_root.join("home"); + let workspace_dir = temp_root.join("workspace"); + let aindex_dir = workspace_dir.join("aindex"); + let skill_dir = aindex_dir.join("skills").join("browser").join("agent-browser"); + + std::fs::create_dir_all(temp_home.join(".aindex")).unwrap(); + std::fs::create_dir_all(skill_dir.join("references")).unwrap(); + std::fs::create_dir_all(skill_dir.join("templates")).unwrap(); + std::fs::create_dir_all(skill_dir.join("assets")).unwrap(); + std::fs::create_dir_all(&aindex_dir).unwrap(); + + std::fs::write( + temp_home.join(".aindex").join(".tnmsc.json"), + serde_json::json!({ + "workspaceDir": workspace_dir.to_string_lossy(), + "plugins": { + "agentsMd": false, + "git": false, + "readme": false, + "vscode": false, + "zed": false, + "jetbrains": false, + "jetbrainsCodeStyle": false, + "cursor": false, + "droid": false, + "gemini": false, + "kiro": false, + "qoder": false, + "trae": false, + "traeCn": false, + "warp": false, + "windsurf": false, + "codex": true, + "claudeCode": false, + "opencode": true + } + }) + .to_string(), + ) + .unwrap(); + + std::fs::write( + aindex_dir.join("workspace.mdx"), + "---\ndescription: workspace\n---\nWorkspace prompt\n", + ) + .unwrap(); + std::fs::write( + aindex_dir.join("workspace.src.mdx"), + "---\ndescription: workspace\n---\nWorkspace prompt\n", + ) + .unwrap(); + std::fs::write( + skill_dir.join("skill.src.mdx"), + "export default { description: 'Browser skill' }\n\n# Browser Skill\n", + ) + .unwrap(); + std::fs::write( + skill_dir.join("skill.mdx"), + "export default { description: 'Browser skill' }\n\n# Browser Skill\n", + ) + .unwrap(); + std::fs::write( + skill_dir.join("references").join("linux-wsl.src.mdx"), + "---\ndescription: Linux WSL reference\n---\n# Linux WSL\n", + ) + .unwrap(); + std::fs::write( + skill_dir.join("references").join("linux-wsl.mdx"), + "---\ndescription: Linux WSL reference\n---\n# Linux WSL\n", + ) + .unwrap(); + std::fs::write( + skill_dir.join("references").join("authentication.src.mdx"), + "---\ndescription: Authentication reference\n---\n# Authentication\n", + ) + .unwrap(); + std::fs::write( + skill_dir.join("references").join("authentication.mdx"), + "---\ndescription: Authentication reference\n---\n# Authentication\n", + ) + .unwrap(); + std::fs::write( + skill_dir.join("templates").join("capture-workflow.sh"), + "#!/usr/bin/env bash\necho capture\n", + ) + .unwrap(); + std::fs::write( + skill_dir.join("templates").join("authenticated-session.sh"), + "#!/usr/bin/env bash\necho auth\n", + ) + .unwrap(); + std::fs::write( + skill_dir.join("assets").join("logo.png"), + [0x89_u8, 0x50, 0x4E, 0x47, 0x00, 0xFF], + ) + .unwrap(); + std::fs::write( + skill_dir.join("mcp.json"), + "{\n \"mcpServers\": {\n \"browser\": { \"command\": \"agent-browser\" }\n }\n}\n", + ) + .unwrap(); + + let temp_home_str = temp_home.to_string_lossy().into_owned(); + + let install = runner.run_at_with_env( + &workspace_dir, + &["install"], + &[("HOME", &temp_home_str)], + ); + install.assert_success("isolated tnmsc install"); - for (label, skills_dir) in [ - ("codex", runner.cwd().join(".codex").join("skills")), - ("opencode", runner.cwd().join(".opencode").join("skills")), - ("claude", runner.cwd().join(".claude").join("skills")), + for (label, skill_root) in [ + ("codex", workspace_dir.join(".codex").join("skills")), + ("opencode", workspace_dir.join(".opencode").join("skills")), ] { + let browser_skill_dir = skill_root.join("browser-agent-browser"); assert!( - skills_dir.is_dir(), - "{label} skills dir should exist: {}", - skills_dir.display() + browser_skill_dir.join("SKILL.md").is_file(), + "{label} should generate SKILL.md for browser-agent-browser" + ); + assert!( + browser_skill_dir.join("references").join("linux-wsl.md").is_file(), + "{label} should generate child docs under references/" + ); + assert!( + browser_skill_dir + .join("references") + .join("authentication.md") + .is_file(), + "{label} should generate every child doc under references/" + ); + assert!( + !browser_skill_dir.join("references").join("linux-wsl.mdx").exists(), + "{label} should not leave child docs as .mdx files" + ); + assert!( + !browser_skill_dir + .join("references") + .join("authentication.mdx") + .exists(), + "{label} should not leave any compiled child doc as .mdx" + ); + assert!( + browser_skill_dir + .join("templates") + .join("capture-workflow.sh") + .is_file(), + "{label} should generate resource files under templates/" + ); + assert!( + browser_skill_dir + .join("templates") + .join("authenticated-session.sh") + .is_file(), + "{label} should generate every template resource" + ); + assert!( + browser_skill_dir.join("assets").join("logo.png").is_file(), + "{label} should generate binary resource files under assets/" + ); + assert!( + browser_skill_dir.join("mcp.json").is_file(), + "{label} should generate mcp.json" + ); + let skill_content = std::fs::read_to_string(browser_skill_dir.join("SKILL.md")).unwrap(); + assert!( + skill_content.contains("name: browser-agent-browser"), + "{label} should align SKILL.md name field with the generated skill directory" + ); + assert_eq!( + std::fs::read(browser_skill_dir.join("assets").join("logo.png")).unwrap(), + vec![0x89_u8, 0x50, 0x4E, 0x47, 0x00, 0xFF], + "{label} should preserve binary resource bytes" ); - - for entry in std::fs::read_dir(&skills_dir).unwrap().flatten() { - if !entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false) { - continue; - } - let skill_name = entry.file_name().to_string_lossy().to_string(); - let skill_dir = entry.path(); - - let mut entries: Vec<_> = std::fs::read_dir(&skill_dir) - .unwrap() - .flatten() - .map(|e| e.file_name().to_string_lossy().to_string()) - .collect(); - entries.sort(); - - assert_eq!( - entries, - vec!["SKILL.md"], - "{label}/skills/{skill_name} should only contain SKILL.md, got: {:?}", - entries - ); - } } + + let stale_file = workspace_dir + .join(".codex") + .join("skills") + .join("browser-agent-browser") + .join("stale.txt"); + std::fs::write(&stale_file, "stale").unwrap(); + assert!(stale_file.is_file(), "stale test file should exist before clean"); + + let clean = runner.run_at_with_env(&workspace_dir, &["clean"], &[("HOME", &temp_home_str)]); + clean.assert_success("isolated tnmsc clean"); + + assert!( + !workspace_dir.join(".codex").exists(), + "clean should remove the entire generated .codex tree" + ); + assert!( + !workspace_dir.join(".opencode").exists(), + "clean should remove the entire generated .opencode tree" + ); } diff --git a/cli/local-tests/tests/command_contract.rs b/cli/local-tests/tests/command_contract.rs index 26ff877a..74cda88a 100644 --- a/cli/local-tests/tests/command_contract.rs +++ b/cli/local-tests/tests/command_contract.rs @@ -4,6 +4,8 @@ use tnmsc_local_tests::{EXPECTED_SUBCOMMANDS, LocalTestRunner, current_package_version}; +/// Verify that `tnmsc help` lists all expected subcommands (install, dry-run, clean, version, help). +/// This ensures the CLI contract with end-users is not accidentally broken. #[test] fn local_cli_help_shows_expected_subcommands() { let runner = LocalTestRunner::new(); @@ -18,6 +20,8 @@ fn local_cli_help_shows_expected_subcommands() { } } +/// Verify that `tnmsc version` outputs the same version string as Cargo.toml. +/// Prevents version drift between the binary and the package metadata. #[test] fn local_cli_version_matches_package_version() { let runner = LocalTestRunner::new(); diff --git a/cli/local-tests/tests/dry_run_smoke.rs b/cli/local-tests/tests/dry_run_smoke.rs index 7f3d96cc..4a14644b 100644 --- a/cli/local-tests/tests/dry_run_smoke.rs +++ b/cli/local-tests/tests/dry_run_smoke.rs @@ -4,6 +4,8 @@ use tnmsc_local_tests::LocalTestRunner; +/// Verify that `tnmsc dry-run` reports what would be written but does NOT create +/// any project files. The core safety guarantee of dry-run mode. #[test] fn local_dry_run_does_not_write_project_files() { let runner = LocalTestRunner::new(); diff --git a/cli/local-tests/tests/install_smoke.rs b/cli/local-tests/tests/install_smoke.rs index 5342e4e2..4b31a9a5 100644 --- a/cli/local-tests/tests/install_smoke.rs +++ b/cli/local-tests/tests/install_smoke.rs @@ -7,6 +7,8 @@ use tnmsc_local_tests::LocalTestRunner; +/// Verify that `tnmsc install` generates both project-level CLAUDE.md and global +/// ~/.claude/CLAUDE.md with non-empty content. #[test] fn local_install_generates_project_claude_md() { let runner = LocalTestRunner::new(); @@ -46,6 +48,8 @@ fn local_install_generates_project_claude_md() { ); } +/// Verify that running `tnmsc install` twice in a row produces identical output. +/// Install must be safely repeatable without side effects. #[test] fn local_install_idempotent() { let runner = LocalTestRunner::new(); @@ -82,6 +86,9 @@ fn local_install_idempotent() { ); } +/// Verify the full .claude/ directory structure after install: agents/, skills/, +/// commands/, rules/ subdirectories, all with correctly formatted files +/// (YAML front matter, expected fields like agent:/command:/skill:/rule:). #[test] fn local_install_generates_claude_directory_structure() { let runner = LocalTestRunner::new(); @@ -265,6 +272,8 @@ fn local_install_generates_claude_directory_structure() { } } +/// Verify that template interpolation in the global CLAUDE.md works correctly: +/// `{profile.username}` is replaced with `TrueNine` in both inline text and URLs. #[test] fn local_install_claude_global_md_url_interpolation() { let runner = LocalTestRunner::new(); @@ -312,6 +321,8 @@ fn local_install_claude_global_md_url_interpolation() { ); } +/// Guard test: ensure the compiled tnmsc binary exists before running other tests. +/// Provides a clear error message with build instructions if missing. #[test] fn binary_exists_before_tests() { let binary = tnmsc_local_tests::binary_path(); diff --git a/cli/local-tests/tests/logging_clean.rs b/cli/local-tests/tests/logging_clean.rs index 001402dc..30c116f3 100644 --- a/cli/local-tests/tests/logging_clean.rs +++ b/cli/local-tests/tests/logging_clean.rs @@ -2,6 +2,8 @@ use tnmsc_local_tests::LocalTestRunner; +/// Verify that `--trace` clean outputs all major spans: +/// cleanup.discover and cleanup.execute. #[test] fn clean_outputs_key_spans_and_events() { let runner = LocalTestRunner::new(); @@ -34,6 +36,7 @@ fn clean_outputs_key_spans_and_events() { ); } +/// Verify that `--info` clean outputs a deletion summary (what files were removed). #[test] fn clean_outputs_deletion_summary() { let runner = LocalTestRunner::new(); diff --git a/cli/local-tests/tests/logging_dry_run.rs b/cli/local-tests/tests/logging_dry_run.rs index 6e1c1b17..fa4a656e 100644 --- a/cli/local-tests/tests/logging_dry_run.rs +++ b/cli/local-tests/tests/logging_dry_run.rs @@ -2,6 +2,8 @@ use tnmsc_local_tests::LocalTestRunner; +/// Verify that `--trace` dry-run outputs all major spans: +/// config.load, context.collect, output.build. #[test] fn dry_run_outputs_key_spans_and_events() { let runner = LocalTestRunner::new(); @@ -35,6 +37,7 @@ fn dry_run_outputs_key_spans_and_events() { ); } +/// Verify that `--info` dry-run outputs a plan summary (what files would be written). #[test] fn dry_run_outputs_plan_preview() { let runner = LocalTestRunner::new(); diff --git a/cli/local-tests/tests/logging_error_feedback.rs b/cli/local-tests/tests/logging_error_feedback.rs index 160bd7ee..8a453718 100644 --- a/cli/local-tests/tests/logging_error_feedback.rs +++ b/cli/local-tests/tests/logging_error_feedback.rs @@ -19,6 +19,8 @@ fn run_without_global_config( ) } +/// Verify that running install without config outputs a structured diagnostic with a +/// "What happened" section, a fix suggestion mentioning .tnmsc.json, and actionable next steps. #[test] fn missing_config_outputs_diagnostic_with_fix() { let runner = LocalTestRunner::new(); @@ -51,6 +53,8 @@ fn missing_config_outputs_diagnostic_with_fix() { ); } +/// Verify that `--error` log level still shows the diagnostic structure when config +/// is missing (error diagnostics are never suppressed regardless of log level). #[test] fn missing_config_at_error_level_shows_diagnostic() { let runner = LocalTestRunner::new(); diff --git a/cli/local-tests/tests/logging_install_observability.rs b/cli/local-tests/tests/logging_install_observability.rs index ac9b2585..46eb0542 100644 --- a/cli/local-tests/tests/logging_install_observability.rs +++ b/cli/local-tests/tests/logging_install_observability.rs @@ -2,6 +2,8 @@ use tnmsc_local_tests::LocalTestRunner; +/// Verify that `--trace` install outputs all major spans: config.load, context.collect, +/// output.build, files.write, plus collector sub-spans. #[test] fn install_outputs_key_spans_and_events() { let runner = LocalTestRunner::new(); @@ -62,6 +64,7 @@ fn install_outputs_key_spans_and_events() { ); } +/// Verify that `--info` install outputs plugin resolution information ("Plugins resolved"). #[test] fn install_outputs_plugin_resolution() { let runner = LocalTestRunner::new(); @@ -81,6 +84,7 @@ fn install_outputs_plugin_resolution() { ); } +/// Verify that `--debug` install outputs individual file write/skip events. #[test] fn install_outputs_file_write_events() { let runner = LocalTestRunner::new(); diff --git a/cli/local-tests/tests/logging_levels.rs b/cli/local-tests/tests/logging_levels.rs index fa5fc249..6baa0f61 100644 --- a/cli/local-tests/tests/logging_levels.rs +++ b/cli/local-tests/tests/logging_levels.rs @@ -2,6 +2,8 @@ use tnmsc_local_tests::LocalTestRunner; +/// Verify that `--trace` log level outputs fine-grained collector span events +/// like `collect.aindex_resolvers` and `config.load`. #[test] fn trace_level_outputs_span_events() { let runner = LocalTestRunner::new(); @@ -29,6 +31,8 @@ fn trace_level_outputs_span_events() { ); } +/// Verify that the default (info) log level outputs top-level events like +/// "Install started" and "Install completed". #[test] fn info_level_outputs_top_level_events() { let runner = LocalTestRunner::new(); @@ -53,6 +57,8 @@ fn info_level_outputs_top_level_events() { ); } +/// Verify that `--error` log level suppresses info events but still outputs +/// error diagnostics when config is missing. #[test] fn error_level_only_outputs_errors() { let runner = LocalTestRunner::new(); @@ -83,6 +89,8 @@ fn error_level_only_outputs_errors() { ); } +/// Verify that `--debug` log level outputs intermediate events like +/// "Context collected" and "Output files built". #[test] fn debug_level_outputs_debug_events() { let runner = LocalTestRunner::new(); diff --git a/cli/local-tests/tests/opencode_agent_mode_validation.rs b/cli/local-tests/tests/opencode_agent_mode_validation.rs index 00c08d11..57cd91ff 100644 --- a/cli/local-tests/tests/opencode_agent_mode_validation.rs +++ b/cli/local-tests/tests/opencode_agent_mode_validation.rs @@ -69,6 +69,9 @@ fn extract_mode_from_agent_file(content: &str) -> Option { None } +/// Verify that every generated agent file has a `mode` field whose value is one of +/// the three valid options: "subagent", "primary", or "all". +/// Invalid values cause opencode startup errors. #[test] fn local_opencode_agent_mode_must_be_valid() { let runner = LocalTestRunner::new(); diff --git a/cli/local-tests/tests/opencode_smoke.rs b/cli/local-tests/tests/opencode_smoke.rs index 8968b8e4..82ee4092 100644 --- a/cli/local-tests/tests/opencode_smoke.rs +++ b/cli/local-tests/tests/opencode_smoke.rs @@ -4,6 +4,9 @@ use tnmsc_local_tests::LocalTestRunner; +/// Comprehensive verification of the .opencode/ directory after install: AGENTS.md +/// exists, and agents/, skills/, commands/, rules/ subdirectories all contain correctly +/// formatted files with YAML front matter and expected source identifiers. #[test] fn local_opencode_install_generates_project_agents_md() { let runner = LocalTestRunner::new(); @@ -207,6 +210,7 @@ fn local_opencode_install_generates_project_agents_md() { } } +/// Verify that the global ~/.config/opencode/AGENTS.md is generated with non-empty content. #[test] fn local_opencode_install_generates_global_agents_md() { let runner = LocalTestRunner::new(); @@ -232,6 +236,7 @@ fn local_opencode_install_generates_global_agents_md() { ); } +/// Verify that two consecutive installs produce identical .opencode/AGENTS.md content. #[test] fn local_opencode_install_idempotent() { let runner = LocalTestRunner::new(); @@ -264,6 +269,7 @@ fn local_opencode_install_idempotent() { ); } +/// Verify that `tnmsc clean` removes the generated .opencode/ directory. #[test] fn local_opencode_clean_removes_files() { let runner = LocalTestRunner::new(); @@ -285,6 +291,7 @@ fn local_opencode_clean_removes_files() { ); } +/// Verify that `tnmsc dry-run` does NOT create .opencode/AGENTS.md. #[test] fn local_opencode_dry_run_does_not_write() { let runner = LocalTestRunner::new(); @@ -307,6 +314,8 @@ fn local_opencode_dry_run_does_not_write() { ); } +/// Verify that `{profile.username}` template interpolation works in the global opencode +/// AGENTS.md — both inline text and URLs are correctly evaluated. #[test] fn local_opencode_global_md_url_interpolation() { let runner = LocalTestRunner::new(); @@ -343,6 +352,8 @@ fn local_opencode_global_md_url_interpolation() { ); } +/// Verify that the project-level .opencode/AGENTS.md includes global memory content +/// (is at least as long as the global file and contains workspace-level data like 'TrueNine'). #[test] fn local_opencode_project_content_includes_workspace_memory() { let runner = LocalTestRunner::new(); @@ -373,10 +384,9 @@ fn local_opencode_project_content_includes_workspace_memory() { ); } -/// 断言生成的 .opencode/agents/*.md 中不包含 `model` 字段。 -/// -/// NOTE: `model` 是未来功能(per-agent model override),当前不实现, -/// 因此生成时必须将其剥离。此测试在功能落地前充当回归保护。 +/// Regression guard: generated agent .md files must NOT contain a `model:` field. +/// Per-agent model override is a future feature — premature inclusion would break +/// opencode schema validation. #[test] fn local_opencode_agent_md_should_not_contain_model_field() { let runner = LocalTestRunner::new(); @@ -410,6 +420,8 @@ fn local_opencode_agent_md_should_not_contain_model_field() { } } +/// Verify that every generated agent file contains `mode: subagent` (or `mode: "subagent"`) +/// in its YAML front matter. Subagent mode is the expected default for memory-sync agents. #[test] fn local_opencode_agent_md_must_include_subagent_mode() { let runner = LocalTestRunner::new(); @@ -443,11 +455,9 @@ fn local_opencode_agent_md_must_include_subagent_mode() { } } -/// 回归测试:opencode agent 的 `color` 字段必须匹配 hex 格式 `^#[0-9a-fA-F]{6}$`。 -/// -/// opencode 配置 schema 要求 color 为 6 位 hex 值(如 `#FF5733`), -/// 不接受 CSS 命名颜色(如 `blue`、`red`)。 -/// 参见: https://github.com/opencode-ai/opencode 配置 schema 中 color 字段的 pattern 约束。 +/// Regression guard: the `color` field in agent files must be a 6-digit hex value (#RRGGBB). +/// opencode's config schema rejects CSS named colors like `blue` or `red`. +/// See: https://github.com/opencode-ai/opencode config schema pattern constraint. #[test] fn local_opencode_agent_md_color_must_be_hex_format() { fn is_valid_hex_color(s: &str) -> bool { @@ -508,13 +518,9 @@ fn local_opencode_agent_md_color_must_be_hex_format() { } } -/// 回归测试:opencode 不应在任何子目录下生成嵌套的 .opencode/AGENTS.md。 -/// -/// opencode 只支持两个位置的 AGENTS.md: -/// 1. 全局 ~/.config/opencode/AGENTS.md -/// 2. 项目根目录 /.opencode/AGENTS.md -/// -/// 子目录(如 cli/.opencode/AGENTS.md)属于严重错误,会导致 opencode 行为异常。 +/// Regression guard: opencode only supports AGENTS.md at the project root .opencode/ — +/// no nested subdirectory .opencode/AGENTS.md files should be generated. +/// Nested files cause opencode to behave incorrectly. #[test] fn local_opencode_no_nested_agents_md() { let runner = LocalTestRunner::new(); @@ -574,3 +580,150 @@ fn local_opencode_no_nested_agents_md() { .join("\n") ); } + +/// Isolated regression test for categorized skills with nested child docs. +/// Verifies that: +/// 1. `name` in SKILL.md matches the generated directory name +/// 2. child docs are compiled and emitted as `.md`, not `.mdx` +/// 3. clean removes the generated project tree +#[test] +fn regression_isolated_opencode_skill_name_and_child_doc_extensions() { + let runner = LocalTestRunner::new(); + + let temp_root = std::env::temp_dir().join(format!( + "tnmsc-local-opencode-reverse-{}-{}", + std::process::id(), + std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap_or_default() + .as_nanos() + )); + let temp_home = temp_root.join("home"); + let workspace_dir = temp_root.join("workspace"); + let aindex_dir = workspace_dir.join("aindex"); + let skill_dir = aindex_dir + .join("skills") + .join("dev-tools") + .join("reverse-engineering"); + + std::fs::create_dir_all(temp_home.join(".aindex")).unwrap(); + std::fs::create_dir_all(&aindex_dir).unwrap(); + std::fs::create_dir_all(&skill_dir).unwrap(); + + std::fs::write( + temp_home.join(".aindex").join(".tnmsc.json"), + serde_json::json!({ + "workspaceDir": workspace_dir.to_string_lossy(), + "plugins": { + "agentsMd": false, + "git": false, + "readme": false, + "vscode": false, + "zed": false, + "jetbrains": false, + "jetbrainsCodeStyle": false, + "cursor": false, + "droid": false, + "gemini": false, + "kiro": false, + "qoder": false, + "trae": false, + "traeCn": false, + "warp": false, + "windsurf": false, + "codex": false, + "claudeCode": false, + "opencode": true + } + }) + .to_string(), + ) + .unwrap(); + + std::fs::write( + aindex_dir.join("workspace.mdx"), + "---\ndescription: workspace\n---\nWorkspace prompt\n", + ) + .unwrap(); + std::fs::write( + aindex_dir.join("workspace.src.mdx"), + "---\ndescription: workspace\n---\nWorkspace prompt\n", + ) + .unwrap(); + + std::fs::write( + skill_dir.join("skill.src.mdx"), + "export default { name: 'reverse-engineering', description: 'Reverse engineering skill' }\n\n# Reverse\n", + ) + .unwrap(); + std::fs::write( + skill_dir.join("skill.mdx"), + "export default { name: 'reverse-engineering', description: 'Reverse engineering skill' }\n\n# Reverse\n", + ) + .unwrap(); + + for name in ["packet-capture", "reverse-tools"] { + std::fs::write( + skill_dir.join(format!("{name}.src.mdx")), + format!("---\ndescription: {name}\n---\n# {name}\n"), + ) + .unwrap(); + std::fs::write( + skill_dir.join(format!("{name}.mdx")), + format!("---\ndescription: {name}\n---\n# {name}\n"), + ) + .unwrap(); + } + + let temp_home_str = temp_home.to_string_lossy().into_owned(); + + let install = runner.run_at_with_env( + &workspace_dir, + &["install"], + &[("HOME", &temp_home_str)], + ); + install.assert_success("isolated tnmsc install for opencode"); + + let generated_skill_dir = workspace_dir + .join(".opencode") + .join("skills") + .join("dev-tools-reverse-engineering"); + assert!( + generated_skill_dir.join("SKILL.md").is_file(), + "opencode should generate SKILL.md for dev-tools-reverse-engineering" + ); + assert!( + generated_skill_dir.join("packet-capture.md").is_file(), + "opencode should emit packet-capture child doc as .md" + ); + assert!( + generated_skill_dir.join("reverse-tools.md").is_file(), + "opencode should emit reverse-tools child doc as .md" + ); + assert!( + !generated_skill_dir.join("packet-capture.mdx").exists(), + "opencode must not emit packet-capture child doc as .mdx" + ); + assert!( + !generated_skill_dir.join("reverse-tools.mdx").exists(), + "opencode must not emit reverse-tools child doc as .mdx" + ); + + let skill_content = std::fs::read_to_string(generated_skill_dir.join("SKILL.md")).unwrap(); + assert!( + skill_content.contains("name: dev-tools-reverse-engineering"), + "opencode SKILL.md name field must match generated directory name" + ); + assert!( + skill_content.contains("skill: aindex/skills/dev-tools/reverse-engineering"), + "opencode SKILL.md should keep the categorized source identifier" + ); + + let clean = runner.run_at_with_env(&workspace_dir, &["clean"], &[("HOME", &temp_home_str)]); + clean.assert_success("isolated tnmsc clean for opencode"); + + assert!( + !workspace_dir.join(".opencode").exists(), + "clean should remove the generated .opencode tree" + ); +} diff --git a/cli/local-tests/tests/rules_source_smoke.rs b/cli/local-tests/tests/rules_source_smoke.rs index 93438dc8..d2eaebe9 100644 --- a/cli/local-tests/tests/rules_source_smoke.rs +++ b/cli/local-tests/tests/rules_source_smoke.rs @@ -116,6 +116,9 @@ fn collect_src_mdx_files(dir: &Path) -> Vec { files } +/// Verify that all aindex rule source files (.src.mdx) use the `globs` field +/// (not `paths`) in their export default. The SDK is responsible for converting +/// globs → paths during output; source files must use globs. #[test] fn local_rules_src_mdx_uses_globs_not_paths() { let runner = LocalTestRunner::new(); @@ -192,6 +195,9 @@ fn local_rules_src_mdx_uses_globs_not_paths() { ); } +/// Verify that the generated rule output files (e.g. .claude/rules/*.md) use `paths:` +/// in their YAML front matter, not `globs:`. This confirms the SDK's globs→paths +/// conversion is working correctly. #[test] fn local_rules_globs_converted_to_paths_in_output() { let runner = LocalTestRunner::new(); diff --git a/cli/local-tests/tests/trae_smoke.rs b/cli/local-tests/tests/trae_smoke.rs index a9caa464..8145778c 100644 --- a/cli/local-tests/tests/trae_smoke.rs +++ b/cli/local-tests/tests/trae_smoke.rs @@ -5,6 +5,8 @@ use std::fs; use tnmsc_local_tests::LocalTestRunner; +/// Guard test: ensure the compiled tnmsc binary exists. Provides a clear +/// build instruction error if missing. #[test] fn binary_exists_before_tests() { let binary = tnmsc_local_tests::binary_path(); @@ -15,6 +17,8 @@ fn binary_exists_before_tests() { ); } +/// Verify that install generates .trae/steering/GLOBAL.md and does NOT generate +/// the deprecated .trae-cn/ path. #[test] fn local_trae_steering_generated_after_install() { let runner = LocalTestRunner::new(); @@ -39,6 +43,7 @@ fn local_trae_steering_generated_after_install() { ); } +/// Verify that two consecutive installs produce identical .trae/steering/GLOBAL.md content. #[test] fn local_trae_steering_idempotent() { let runner = LocalTestRunner::new(); @@ -78,6 +83,7 @@ fn local_trae_steering_idempotent() { ); } +/// Verify that `tnmsc clean` removes the generated .trae/steering/GLOBAL.md. #[test] fn local_trae_steering_removed_after_clean() { let runner = LocalTestRunner::new(); @@ -99,6 +105,9 @@ fn local_trae_steering_removed_after_clean() { ); } +/// Verify backward-compatible cleanup: even if a legacy .trae-cn/ directory exists, +/// `tnmsc clean` removes it along with .trae/. This ensures old installations +/// are properly migrated. #[test] fn local_trae_cn_cleaned_for_compatibility() { let runner = LocalTestRunner::new(); diff --git a/sdk/src/domain/output_plans/claude_code_output_plan.rs b/sdk/src/domain/output_plans/claude_code_output_plan.rs index 4a7d13a3..d2af8337 100644 --- a/sdk/src/domain/output_plans/claude_code_output_plan.rs +++ b/sdk/src/domain/output_plans/claude_code_output_plan.rs @@ -150,7 +150,7 @@ fn build_output_files( }; let claude_skills_dir = project_root_dir.join(".claude").join("skills"); for skill in skills { - let skill_sub_dir = claude_skills_dir.join(&skill.skill_name); + let skill_sub_dir = claude_skills_dir.join(resolve_skill_dir_name(skill)); // Main SKILL.md with YAML front matter output_files.push(BaseOutputFileDeclarationDto { @@ -162,6 +162,8 @@ fn build_output_files( content: build_skill_content(skill), encoding: None, }); + + append_skill_supporting_files(&mut output_files, &skill_sub_dir, skill); } } } @@ -220,6 +222,26 @@ fn merge_workspace_root_memory(global_memory: Option<&str>, workspace_prompt: &s } } +fn resolve_skill_dir_name(skill: &crate::domain::plugin_shared::SkillPrompt) -> String { + if let Some(category_name) = skill.category_name.as_deref().map(str::trim) + && !category_name.is_empty() + { + return format!("{category_name}-{}", skill.skill_name); + } + + skill.skill_name.clone() +} + +fn build_skill_source_identifier(skill: &crate::domain::plugin_shared::SkillPrompt) -> String { + if let Some(category_name) = skill.category_name.as_deref().map(str::trim) + && !category_name.is_empty() + { + return format!("aindex/skills/{category_name}/{}", skill.skill_name); + } + + format!("aindex/skills/{}", skill.skill_name) +} + fn build_rule_content(rule: &crate::domain::plugin_shared::RulePrompt) -> String { let Some(ref yaml_fm) = rule.yaml_front_matter else { return rule.content.clone(); @@ -317,7 +339,11 @@ fn build_skill_content(skill: &crate::domain::plugin_shared::SkillPrompt) -> Str // Add skill source identifier metadata.insert( "skill".to_string(), - Value::String(format!("aindex/skills/{}", skill.skill_name)), + Value::String(build_skill_source_identifier(skill)), + ); + metadata.insert( + "name".to_string(), + Value::String(resolve_skill_dir_name(skill)), ); // Filter out empty arrays and null values @@ -332,6 +358,62 @@ fn build_skill_content(skill: &crate::domain::plugin_shared::SkillPrompt) -> Str wrap_yaml_front_matter(&metadata, &skill.content) } +fn append_skill_supporting_files( + output_files: &mut Vec, + skill_sub_dir: &std::path::Path, + skill: &crate::domain::plugin_shared::SkillPrompt, +) { + if let Some(child_docs) = skill.child_docs.as_ref() { + for child_doc in child_docs { + output_files.push(BaseOutputFileDeclarationDto { + path: skill_sub_dir + .join(resolve_child_doc_output_relative_path(&child_doc.relative_path)) + .to_string_lossy() + .into_owned(), + scope: Some(PROJECT_SCOPE.to_string()), + content: child_doc.content.clone(), + encoding: None, + }); + } + } + + if let Some(resources) = skill.resources.as_ref() { + for resource in resources { + output_files.push(BaseOutputFileDeclarationDto { + path: skill_sub_dir + .join(&resource.relative_path) + .to_string_lossy() + .into_owned(), + scope: Some(PROJECT_SCOPE.to_string()), + content: resource.content.clone(), + encoding: match resource.encoding { + crate::domain::plugin_shared::SkillResourceEncoding::Base64 => { + Some("base64".to_string()) + } + crate::domain::plugin_shared::SkillResourceEncoding::Text => None, + }, + }); + } + } + + if let Some(mcp_config) = skill.mcp_config.as_ref() { + output_files.push(BaseOutputFileDeclarationDto { + path: skill_sub_dir.join("mcp.json").to_string_lossy().into_owned(), + scope: Some(PROJECT_SCOPE.to_string()), + content: mcp_config.raw_content.clone(), + encoding: None, + }); + } +} + +fn resolve_child_doc_output_relative_path(relative_path: &str) -> String { + if let Some(stripped) = relative_path.strip_suffix(".mdx") { + return format!("{stripped}.md"); + } + + relative_path.to_string() +} + fn wrap_yaml_front_matter(metadata: &serde_json::Map, content: &str) -> String { if metadata.is_empty() { return content.to_string(); @@ -440,6 +522,7 @@ mod tests { content: "body".to_string(), length: 4, skill_name: name.to_string(), + category_name: None, dir: crate::infra::path_types::RelativePath::new(name, "/workspace/aindex/skills"), yaml_front_matter: Some(SkillYAMLFrontMatter { description: Some("desc".to_string()), @@ -458,6 +541,19 @@ mod tests { raw_front_matter: None, markdown_ast: None, markdown_contents: None, + }, SkillChildDoc { + prompt_type: PromptKind::SkillChildDoc, + content: "linux-wsl".to_string(), + length: 9, + file_path_kind: crate::infra::path_types::FilePathKind::Relative, + relative_path: "references/linux-wsl.mdx".to_string(), + dir: crate::infra::path_types::RelativePath::new( + "references/linux-wsl.mdx", + "/workspace/aindex/skills/test", + ), + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, }]), resources: Some(vec![SkillResource { prompt_type: PromptKind::SkillResource, @@ -468,6 +564,24 @@ mod tests { encoding: SkillResourceEncoding::Text, length: 5, mime_type: None, + }, SkillResource { + prompt_type: PromptKind::SkillResource, + extension: "sh".to_string(), + file_name: "capture-workflow.sh".to_string(), + relative_path: "templates/capture-workflow.sh".to_string(), + content: "#!/usr/bin/env bash\necho capture\n".to_string(), + encoding: SkillResourceEncoding::Text, + length: 32, + mime_type: None, + }, SkillResource { + prompt_type: PromptKind::SkillResource, + extension: "bin".to_string(), + file_name: "blob.bin".to_string(), + relative_path: "assets/blob.bin".to_string(), + content: "AAEC".to_string(), + encoding: SkillResourceEncoding::Base64, + length: 3, + mime_type: Some("application/octet-stream".to_string()), }]), mcp_config: Some(SkillMcpConfig { prompt_type: PromptKind::SkillMcpConfig, @@ -479,7 +593,7 @@ mod tests { } #[test] - fn skill_output_only_contains_skill_md() { + fn skill_output_includes_child_docs_resources_and_mcp_config() { use crate::domain::plugin_shared::*; let skill = make_test_skill("test-skill"); @@ -517,14 +631,70 @@ mod tests { assert_eq!( skill_paths.len(), - 1, - "should only have SKILL.md, got: {:?}", + 7, + "skill output should include main doc, child docs, resources, and mcp config, got: {:?}", skill_paths ); + assert!(skill_paths.iter().any(|path| path.ends_with("SKILL.md"))); + assert!(skill_paths.iter().any(|path| path.ends_with("guide.md"))); + assert!(skill_paths.iter().any(|path| path.ends_with("references/linux-wsl.md"))); + assert!(skill_paths.iter().any(|path| path.ends_with("assets/notes.txt"))); + assert!(skill_paths.iter().any(|path| path.ends_with("templates/capture-workflow.sh"))); + assert!(skill_paths.iter().any(|path| path.ends_with("assets/blob.bin"))); + assert!(skill_paths.iter().any(|path| path.ends_with("mcp.json"))); + + let binary_resource = plan + .output_files + .iter() + .find(|file| file.path.ends_with("assets/blob.bin")) + .unwrap(); + assert_eq!(binary_resource.encoding.as_deref(), Some("base64")); + } + + #[test] + fn categorized_skill_uses_prefixed_directory_and_source_identifier() { + use crate::domain::plugin_shared::*; + + let mut skill = make_test_skill("reverse-engineering"); + skill.category_name = Some("dev-tools".to_string()); + let context = OutputContext { + workspace: Some(Workspace { + directory: RootPath::new("/workspace"), + projects: vec![Project { + name: Some("__workspace__".to_string()), + is_workspace_root_project: Some(true), + root_memory_prompt: Some(ProjectRootMemoryPrompt { + prompt_type: PromptKind::ProjectRootMemory, + content: "root".to_string(), + length: 4, + file_path_kind: FilePathKind::Root, + dir: RootPath::new("/workspace"), + yaml_front_matter: None, + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, + }), + ..Project::default() + }], + }), + skills: Some(vec![skill]), + ..OutputContext::default() + }; + + let plan = build_claude_code_output_plan(&context).unwrap(); + let skill_file = plan + .output_files + .iter() + .find(|file| { + file + .path + .contains(".claude/skills/dev-tools-reverse-engineering/SKILL.md") + }) + .unwrap(); + + assert!(skill_file.content.contains("name: dev-tools-reverse-engineering")); assert!( - skill_paths[0].ends_with("SKILL.md"), - "output should be SKILL.md, got: {}", - skill_paths[0] + skill_file.content.contains("skill: aindex/skills/dev-tools/reverse-engineering") ); } } diff --git a/sdk/src/domain/output_plans/codex_output_plan.rs b/sdk/src/domain/output_plans/codex_output_plan.rs index f3169eda..37de4620 100644 --- a/sdk/src/domain/output_plans/codex_output_plan.rs +++ b/sdk/src/domain/output_plans/codex_output_plan.rs @@ -32,6 +32,26 @@ const CODEX_AGENTS_DIR: &str = "agents"; const CODEX_SKILLS_DIR: &str = "skills"; const PROJECT_SCOPE: &str = "project"; +fn resolve_skill_dir_name(skill: &crate::domain::plugin_shared::SkillPrompt) -> String { + if let Some(category_name) = skill.category_name.as_deref().map(str::trim) + && !category_name.is_empty() + { + return format!("{category_name}-{}", skill.skill_name); + } + + skill.skill_name.clone() +} + +fn build_skill_source_identifier(skill: &crate::domain::plugin_shared::SkillPrompt) -> String { + if let Some(category_name) = skill.category_name.as_deref().map(str::trim) + && !category_name.is_empty() + { + return format!("aindex/skills/{category_name}/{}", skill.skill_name); + } + + format!("aindex/skills/{}", skill.skill_name) +} + pub fn collect_codex_output_plan(context_json: &str) -> Result { let context = OutputContext::from_json(context_json)?; let plan = build_codex_output_plan(&context)?; @@ -133,7 +153,7 @@ fn build_output_files( .join(CODEX_GLOBAL_CONFIG_DIR) .join(CODEX_SKILLS_DIR); for skill in skills { - let skill_sub_dir = codex_skills_dir.join(&skill.skill_name); + let skill_sub_dir = codex_skills_dir.join(resolve_skill_dir_name(skill)); output_files.push(BaseOutputFileDeclarationDto { path: skill_sub_dir @@ -144,6 +164,8 @@ fn build_output_files( content: build_skill_content(skill), encoding: None, }); + + append_skill_supporting_files(&mut output_files, &skill_sub_dir, skill); } } } @@ -249,7 +271,11 @@ fn build_skill_content(skill: &crate::domain::plugin_shared::SkillPrompt) -> Str // Add skill source identifier metadata.insert( "skill".to_string(), - serde_json::Value::String(format!("aindex/skills/{}", skill.skill_name)), + serde_json::Value::String(build_skill_source_identifier(skill)), + ); + metadata.insert( + "name".to_string(), + serde_json::Value::String(resolve_skill_dir_name(skill)), ); // Filter out empty arrays and null values @@ -264,6 +290,62 @@ fn build_skill_content(skill: &crate::domain::plugin_shared::SkillPrompt) -> Str wrap_yaml_front_matter(&metadata, &skill.content) } +fn append_skill_supporting_files( + output_files: &mut Vec, + skill_sub_dir: &std::path::Path, + skill: &crate::domain::plugin_shared::SkillPrompt, +) { + if let Some(child_docs) = skill.child_docs.as_ref() { + for child_doc in child_docs { + output_files.push(BaseOutputFileDeclarationDto { + path: skill_sub_dir + .join(resolve_child_doc_output_relative_path(&child_doc.relative_path)) + .to_string_lossy() + .into_owned(), + scope: Some(PROJECT_SCOPE.to_string()), + content: child_doc.content.clone(), + encoding: None, + }); + } + } + + if let Some(resources) = skill.resources.as_ref() { + for resource in resources { + output_files.push(BaseOutputFileDeclarationDto { + path: skill_sub_dir + .join(&resource.relative_path) + .to_string_lossy() + .into_owned(), + scope: Some(PROJECT_SCOPE.to_string()), + content: resource.content.clone(), + encoding: match resource.encoding { + crate::domain::plugin_shared::SkillResourceEncoding::Base64 => { + Some("base64".to_string()) + } + crate::domain::plugin_shared::SkillResourceEncoding::Text => None, + }, + }); + } + } + + if let Some(mcp_config) = skill.mcp_config.as_ref() { + output_files.push(BaseOutputFileDeclarationDto { + path: skill_sub_dir.join("mcp.json").to_string_lossy().into_owned(), + scope: Some(PROJECT_SCOPE.to_string()), + content: mcp_config.raw_content.clone(), + encoding: None, + }); + } +} + +fn resolve_child_doc_output_relative_path(relative_path: &str) -> String { + if let Some(stripped) = relative_path.strip_suffix(".mdx") { + return format!("{stripped}.md"); + } + + relative_path.to_string() +} + fn wrap_yaml_front_matter( metadata: &serde_json::Map, content: &str, @@ -519,6 +601,7 @@ mod tests { content: "body".to_string(), length: 4, skill_name: name.to_string(), + category_name: None, dir: RelativePath::new(name, "/workspace/aindex/skills"), yaml_front_matter: Some(SkillYAMLFrontMatter { description: Some("desc".to_string()), @@ -534,6 +617,19 @@ mod tests { raw_front_matter: None, markdown_ast: None, markdown_contents: None, + }, SkillChildDoc { + prompt_type: PromptKind::SkillChildDoc, + content: "linux-wsl".to_string(), + length: 9, + file_path_kind: FilePathKind::Relative, + relative_path: "references/linux-wsl.mdx".to_string(), + dir: RelativePath::new( + "references/linux-wsl.mdx", + "/workspace/aindex/skills/test", + ), + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, }]), resources: Some(vec![SkillResource { prompt_type: PromptKind::SkillResource, @@ -544,6 +640,24 @@ mod tests { encoding: SkillResourceEncoding::Text, length: 5, mime_type: None, + }, SkillResource { + prompt_type: PromptKind::SkillResource, + extension: "sh".to_string(), + file_name: "capture-workflow.sh".to_string(), + relative_path: "templates/capture-workflow.sh".to_string(), + content: "#!/usr/bin/env bash\necho capture\n".to_string(), + encoding: SkillResourceEncoding::Text, + length: 32, + mime_type: None, + }, SkillResource { + prompt_type: PromptKind::SkillResource, + extension: "bin".to_string(), + file_name: "blob.bin".to_string(), + relative_path: "assets/blob.bin".to_string(), + content: "AAEC".to_string(), + encoding: SkillResourceEncoding::Base64, + length: 3, + mime_type: Some("application/octet-stream".to_string()), }]), mcp_config: Some(SkillMcpConfig { prompt_type: PromptKind::SkillMcpConfig, @@ -555,7 +669,7 @@ mod tests { } #[test] - fn skill_output_only_contains_skill_md() { + fn skill_output_includes_child_docs_resources_and_mcp_config() { let skill = make_test_skill("test-skill"); let context = OutputContext { workspace: Some(Workspace { @@ -591,14 +705,68 @@ mod tests { assert_eq!( skill_paths.len(), - 1, - "should only have SKILL.md, got: {:?}", + 7, + "skill output should include main doc, child docs, resources, and mcp config, got: {:?}", skill_paths ); + assert!(skill_paths.iter().any(|path| path.ends_with("SKILL.md"))); + assert!(skill_paths.iter().any(|path| path.ends_with("guide.md"))); + assert!(skill_paths.iter().any(|path| path.ends_with("references/linux-wsl.md"))); + assert!(skill_paths.iter().any(|path| path.ends_with("assets/notes.txt"))); + assert!(skill_paths.iter().any(|path| path.ends_with("templates/capture-workflow.sh"))); + assert!(skill_paths.iter().any(|path| path.ends_with("assets/blob.bin"))); + assert!(skill_paths.iter().any(|path| path.ends_with("mcp.json"))); + + let binary_resource = plan + .output_files + .iter() + .find(|file| file.path.ends_with("assets/blob.bin")) + .unwrap(); + assert_eq!(binary_resource.encoding.as_deref(), Some("base64")); + } + + #[test] + fn categorized_skill_uses_prefixed_directory_and_source_identifier() { + let mut skill = make_test_skill("reverse-engineering"); + skill.category_name = Some("dev-tools".to_string()); + let context = OutputContext { + workspace: Some(Workspace { + directory: RootPath::new("/workspace"), + projects: vec![Project { + name: Some("__workspace__".to_string()), + is_workspace_root_project: Some(true), + root_memory_prompt: Some(ProjectRootMemoryPrompt { + prompt_type: PromptKind::ProjectRootMemory, + content: "root".to_string(), + length: 4, + file_path_kind: FilePathKind::Root, + dir: RootPath::new("/workspace"), + yaml_front_matter: None, + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, + }), + ..Project::default() + }], + }), + skills: Some(vec![skill]), + ..OutputContext::default() + }; + + let plan = build_codex_output_plan(&context).unwrap(); + let skill_file = plan + .output_files + .iter() + .find(|file| { + file + .path + .contains(".codex/skills/dev-tools-reverse-engineering/SKILL.md") + }) + .unwrap(); + + assert!(skill_file.content.contains("name: dev-tools-reverse-engineering")); assert!( - skill_paths[0].ends_with("SKILL.md"), - "output should be SKILL.md, got: {}", - skill_paths[0] + skill_file.content.contains("skill: aindex/skills/dev-tools/reverse-engineering") ); } } diff --git a/sdk/src/domain/output_plans/droid_output_plan.rs b/sdk/src/domain/output_plans/droid_output_plan.rs index fbfd6004..3a3c35ab 100644 --- a/sdk/src/domain/output_plans/droid_output_plan.rs +++ b/sdk/src/domain/output_plans/droid_output_plan.rs @@ -553,6 +553,13 @@ fn transform_command_name(command: &SlashCommandPrompt) -> String { } fn resolve_skill_dir_name(skill: &SkillPrompt) -> String { + if let Some(category_name) = skill.category_name.as_deref().map(str::trim) + && !category_name.is_empty() + && !skill.skill_name.trim().is_empty() + { + return format!("{category_name}-{}", skill.skill_name); + } + if !skill.skill_name.trim().is_empty() { return skill.skill_name.clone(); } @@ -749,6 +756,7 @@ mod tests { content: "Skill body".to_string(), length: "Skill body".len(), skill_name: name.to_string(), + category_name: None, dir: create_relative_path(project_root, name), yaml_front_matter: Some(SkillYAMLFrontMatter { description: Some("Skill description".to_string()), @@ -1110,6 +1118,7 @@ mod tests { content: "Skill body".to_string(), length: "Skill body".len(), skill_name: "test-skill".to_string(), + category_name: None, dir: RelativePath::new("test-skill", "/workspace"), yaml_front_matter: Some(SkillYAMLFrontMatter { description: Some("Skill description".to_string()), @@ -1183,4 +1192,56 @@ mod tests { skill_paths[0] ); } + + #[test] + fn categorized_skill_uses_prefixed_directory_and_front_matter_name() { + let skill = SkillPrompt { + prompt_type: PromptKind::Skill, + content: "Skill body".to_string(), + length: "Skill body".len(), + skill_name: "test-skill".to_string(), + category_name: Some("tools".to_string()), + dir: RelativePath::new("test-skill", "/workspace"), + yaml_front_matter: Some(SkillYAMLFrontMatter { + description: Some("Skill description".to_string()), + ..SkillYAMLFrontMatter::default() + }), + mcp_config: None, + child_docs: None, + resources: None, + markdown_contents: None, + }; + let context = OutputContext { + workspace: Some(Workspace { + directory: RootPath::new("/workspace"), + projects: vec![Project { + name: Some("__workspace__".to_string()), + is_workspace_root_project: Some(true), + root_memory_prompt: Some(ProjectRootMemoryPrompt { + prompt_type: PromptKind::ProjectRootMemory, + content: "root".to_string(), + length: 4, + file_path_kind: FilePathKind::Root, + dir: RootPath::new("/workspace"), + yaml_front_matter: None, + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, + }), + ..Project::default() + }], + }), + skills: Some(vec![skill]), + ..OutputContext::default() + }; + + let plan = build_droid_output_plan(&context).unwrap(); + let skill_file = plan + .output_files + .iter() + .find(|file| file.path.contains(".factory/skills/tools-test-skill/SKILL.md")) + .unwrap(); + + assert!(skill_file.content.starts_with("---\nname: tools-test-skill\n")); + } } diff --git a/sdk/src/domain/output_plans/opencode_output_plan.rs b/sdk/src/domain/output_plans/opencode_output_plan.rs index 921177b4..eee00b43 100644 --- a/sdk/src/domain/output_plans/opencode_output_plan.rs +++ b/sdk/src/domain/output_plans/opencode_output_plan.rs @@ -15,6 +15,26 @@ const OPENCODE_PROJECT_CONFIG_DIR: &str = ".opencode"; const OPENCODE_GLOBAL_CONFIG_DIR: &str = ".config/opencode"; const PROJECT_SCOPE: &str = "project"; +fn resolve_skill_dir_name(skill: &crate::domain::plugin_shared::SkillPrompt) -> String { + if let Some(category_name) = skill.category_name.as_deref().map(str::trim) + && !category_name.is_empty() + { + return format!("{category_name}-{}", skill.skill_name); + } + + skill.skill_name.clone() +} + +fn build_skill_source_identifier(skill: &crate::domain::plugin_shared::SkillPrompt) -> String { + if let Some(category_name) = skill.category_name.as_deref().map(str::trim) + && !category_name.is_empty() + { + return format!("aindex/skills/{category_name}/{}", skill.skill_name); + } + + format!("aindex/skills/{}", skill.skill_name) +} + pub fn collect_opencode_output_plan(context_json: &str) -> Result { let context = serde_json::from_str::(context_json)?; let plan = build_opencode_output_plan(&context)?; @@ -131,7 +151,7 @@ fn build_output_files( .join(OPENCODE_PROJECT_CONFIG_DIR) .join("skills"); for skill in skills { - let skill_sub_dir = opencode_skills_dir.join(&skill.skill_name); + let skill_sub_dir = opencode_skills_dir.join(resolve_skill_dir_name(skill)); output_files.push(BaseOutputFileDeclarationDto { path: skill_sub_dir @@ -142,6 +162,8 @@ fn build_output_files( content: build_skill_content(skill), encoding: None, }); + + append_skill_supporting_files(&mut output_files, &skill_sub_dir, skill); } } } @@ -287,7 +309,11 @@ fn build_skill_content(skill: &crate::domain::plugin_shared::SkillPrompt) -> Str metadata.insert( "skill".to_string(), - Value::String(format!("aindex/skills/{}", skill.skill_name)), + Value::String(build_skill_source_identifier(skill)), + ); + metadata.insert( + "name".to_string(), + Value::String(resolve_skill_dir_name(skill)), ); metadata.retain(|_, v| { @@ -301,6 +327,62 @@ fn build_skill_content(skill: &crate::domain::plugin_shared::SkillPrompt) -> Str wrap_yaml_front_matter(&metadata, &skill.content) } +fn append_skill_supporting_files( + output_files: &mut Vec, + skill_sub_dir: &std::path::Path, + skill: &crate::domain::plugin_shared::SkillPrompt, +) { + if let Some(child_docs) = skill.child_docs.as_ref() { + for child_doc in child_docs { + output_files.push(BaseOutputFileDeclarationDto { + path: skill_sub_dir + .join(resolve_child_doc_output_relative_path(&child_doc.relative_path)) + .to_string_lossy() + .into_owned(), + scope: Some(PROJECT_SCOPE.to_string()), + content: child_doc.content.clone(), + encoding: None, + }); + } + } + + if let Some(resources) = skill.resources.as_ref() { + for resource in resources { + output_files.push(BaseOutputFileDeclarationDto { + path: skill_sub_dir + .join(&resource.relative_path) + .to_string_lossy() + .into_owned(), + scope: Some(PROJECT_SCOPE.to_string()), + content: resource.content.clone(), + encoding: match resource.encoding { + crate::domain::plugin_shared::SkillResourceEncoding::Base64 => { + Some("base64".to_string()) + } + crate::domain::plugin_shared::SkillResourceEncoding::Text => None, + }, + }); + } + } + + if let Some(mcp_config) = skill.mcp_config.as_ref() { + output_files.push(BaseOutputFileDeclarationDto { + path: skill_sub_dir.join("mcp.json").to_string_lossy().into_owned(), + scope: Some(PROJECT_SCOPE.to_string()), + content: mcp_config.raw_content.clone(), + encoding: None, + }); + } +} + +fn resolve_child_doc_output_relative_path(relative_path: &str) -> String { + if let Some(stripped) = relative_path.strip_suffix(".mdx") { + return format!("{stripped}.md"); + } + + relative_path.to_string() +} + fn wrap_yaml_front_matter(metadata: &serde_json::Map, content: &str) -> String { let mut metadata = metadata.clone(); normalize_color(&mut metadata); @@ -701,6 +783,7 @@ mod tests { content: "body".to_string(), length: 4, skill_name: name.to_string(), + category_name: None, dir: crate::infra::path_types::RelativePath::new(name, "/workspace/aindex/skills"), yaml_front_matter: Some(SkillYAMLFrontMatter { description: Some("desc".to_string()), @@ -719,6 +802,19 @@ mod tests { raw_front_matter: None, markdown_ast: None, markdown_contents: None, + }, SkillChildDoc { + prompt_type: PromptKind::SkillChildDoc, + content: "linux-wsl".to_string(), + length: 9, + file_path_kind: crate::infra::path_types::FilePathKind::Relative, + relative_path: "references/linux-wsl.mdx".to_string(), + dir: crate::infra::path_types::RelativePath::new( + "references/linux-wsl.mdx", + "/workspace/aindex/skills/test", + ), + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, }]), resources: Some(vec![SkillResource { prompt_type: PromptKind::SkillResource, @@ -729,6 +825,24 @@ mod tests { encoding: SkillResourceEncoding::Text, length: 5, mime_type: None, + }, SkillResource { + prompt_type: PromptKind::SkillResource, + extension: "sh".to_string(), + file_name: "capture-workflow.sh".to_string(), + relative_path: "templates/capture-workflow.sh".to_string(), + content: "#!/usr/bin/env bash\necho capture\n".to_string(), + encoding: SkillResourceEncoding::Text, + length: 32, + mime_type: None, + }, SkillResource { + prompt_type: PromptKind::SkillResource, + extension: "bin".to_string(), + file_name: "blob.bin".to_string(), + relative_path: "assets/blob.bin".to_string(), + content: "AAEC".to_string(), + encoding: SkillResourceEncoding::Base64, + length: 3, + mime_type: Some("application/octet-stream".to_string()), }]), mcp_config: Some(SkillMcpConfig { prompt_type: PromptKind::SkillMcpConfig, @@ -740,7 +854,7 @@ mod tests { } #[test] - fn skill_output_only_contains_skill_md() { + fn skill_output_includes_child_docs_resources_and_mcp_config() { use crate::domain::plugin_shared::*; let skill = make_test_skill("test-skill"); @@ -778,14 +892,70 @@ mod tests { assert_eq!( skill_paths.len(), - 1, - "should only have SKILL.md, got: {:?}", + 7, + "skill output should include main doc, child docs, resources, and mcp config, got: {:?}", skill_paths ); + assert!(skill_paths.iter().any(|path| path.ends_with("SKILL.md"))); + assert!(skill_paths.iter().any(|path| path.ends_with("guide.md"))); + assert!(skill_paths.iter().any(|path| path.ends_with("references/linux-wsl.md"))); + assert!(skill_paths.iter().any(|path| path.ends_with("assets/notes.txt"))); + assert!(skill_paths.iter().any(|path| path.ends_with("templates/capture-workflow.sh"))); + assert!(skill_paths.iter().any(|path| path.ends_with("assets/blob.bin"))); + assert!(skill_paths.iter().any(|path| path.ends_with("mcp.json"))); + + let binary_resource = plan + .output_files + .iter() + .find(|file| file.path.ends_with("assets/blob.bin")) + .unwrap(); + assert_eq!(binary_resource.encoding.as_deref(), Some("base64")); + } + + #[test] + fn categorized_skill_uses_prefixed_directory_and_source_identifier() { + use crate::domain::plugin_shared::*; + + let mut skill = make_test_skill("reverse-engineering"); + skill.category_name = Some("dev-tools".to_string()); + let context = OutputContext { + workspace: Some(Workspace { + directory: RootPath::new("/workspace"), + projects: vec![Project { + name: Some("__workspace__".to_string()), + is_workspace_root_project: Some(true), + root_memory_prompt: Some(ProjectRootMemoryPrompt { + prompt_type: PromptKind::ProjectRootMemory, + content: "root".to_string(), + length: 4, + file_path_kind: FilePathKind::Root, + dir: RootPath::new("/workspace"), + yaml_front_matter: None, + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, + }), + ..Project::default() + }], + }), + skills: Some(vec![skill]), + ..OutputContext::default() + }; + + let plan = build_opencode_output_plan(&context).unwrap(); + let skill_file = plan + .output_files + .iter() + .find(|file| { + file + .path + .contains(".opencode/skills/dev-tools-reverse-engineering/SKILL.md") + }) + .unwrap(); + + assert!(skill_file.content.contains("name: dev-tools-reverse-engineering")); assert!( - skill_paths[0].ends_with("SKILL.md"), - "output should be SKILL.md, got: {}", - skill_paths[0] + skill_file.content.contains("skill: aindex/skills/dev-tools/reverse-engineering") ); } } diff --git a/sdk/src/domain/plugin_shared.rs b/sdk/src/domain/plugin_shared.rs index 431909f8..6dfeb795 100644 --- a/sdk/src/domain/plugin_shared.rs +++ b/sdk/src/domain/plugin_shared.rs @@ -357,6 +357,8 @@ pub struct SkillPrompt { pub content: String, pub length: usize, pub skill_name: String, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub category_name: Option, pub dir: RelativePath, #[serde(default, skip_serializing_if = "Option::is_none")] pub yaml_front_matter: Option, diff --git a/sdk/src/repositories/skill.rs b/sdk/src/repositories/skill.rs index 7fe30862..81d51c2d 100644 --- a/sdk/src/repositories/skill.rs +++ b/sdk/src/repositories/skill.rs @@ -1,5 +1,5 @@ use std::collections::HashMap; -use std::path::Path; +use std::path::{Path, PathBuf}; use serde::Deserialize; use serde_json::Value; @@ -21,6 +21,20 @@ struct SkillInputOptions { global_scope: Option, } +#[derive(Debug, Clone, PartialEq, Eq)] +struct CollectedSkillDir { + category_name: Option, + skill_name: String, + skill_dir: PathBuf, +} + +#[derive(Debug, Clone)] +struct CategoryDescriptionFiles { + description: Option, + source_path: Option, + compiled_path: Option, +} + fn transform_mdx_references_to_md(content: &str) -> String { let re = regex_lite::Regex::new(r"(!?\[)([^\]]*?)(\]\()([^)]+)(\))").unwrap(); re.replace_all(content, |caps: ®ex_lite::Captures| { @@ -127,6 +141,68 @@ fn extract_skill_metadata_from_export(content: &str) -> Value { Value::Object(metadata) } +fn extract_description_from_exports(content: &str) -> Option { + let default_description_regex = + regex_lite::Regex::new(r#"description\s*:\s*['"`]([^'"`]+)['"`]"#).ok()?; + + let export_default_regex = regex_lite::Regex::new(r"export\s+default\s*\{([\s\S]*?)\}").ok()?; + if let Some(caps) = export_default_regex.captures(content) + && let Some(object_content) = caps.get(1) + && let Some(desc_caps) = default_description_regex.captures(object_content.as_str()) + && let Some(description) = desc_caps.get(1) + { + return Some(description.as_str().trim().to_string()); + } + + let named_export_regex = + regex_lite::Regex::new(r#"export\s+(?:const|let)\s+description\s*=\s*['"`]([^'"`]+)['"`]"#) + .ok()?; + named_export_regex + .captures(content) + .and_then(|caps| caps.get(1)) + .map(|description| description.as_str().trim().to_string()) +} + +fn strip_leading_front_matter(content: &str) -> &str { + let front_matter_regex = + regex_lite::Regex::new(r"(?s)^---\r?\n.*?\r?\n---(?:(?:\r?\n){1,2}|$)").ok(); + if let Some(re) = front_matter_regex + && let Some(matched) = re.find(content) + { + return &content[matched.end()..]; + } + content +} + +fn strip_leading_export_statements(content: &str) -> String { + let export_default_regex = + regex_lite::Regex::new(r"(?s)^\s*export\s+default\s*\{[\s\S]*?\}\s*;?\s*").ok(); + let named_export_regex = + regex_lite::Regex::new(r#"(?m)^\s*export\s+(?:const|let)\s+description\s*=\s*['"`][^'"`]+['"`]\s*;?\s*$\n?"#).ok(); + + let without_default = if let Some(re) = export_default_regex { + re.replace(content, "").into_owned() + } else { + content.to_string() + }; + + if let Some(re) = named_export_regex { + return re.replace_all(&without_default, "").into_owned(); + } + + without_default +} + +fn extract_description_from_markdown_body(content: &str) -> Option { + let without_front_matter = strip_leading_front_matter(content); + let without_exports = strip_leading_export_statements(without_front_matter); + let body = without_exports.trim(); + if body.is_empty() { + return None; + } + Some(body.to_string()) +} + fn merge_defined_skill_metadata(sources: &[Option]) -> Value { let mut merged = serde_json::Map::new(); for source in sources { @@ -141,6 +217,50 @@ fn merge_defined_skill_metadata(sources: &[Option]) -> Value { Value::Object(merged) } +fn read_category_description_files( + category_dir: &Path, +) -> Result, crate::CliError> { + let source_path = category_dir.join("desc.src.mdx"); + let compiled_path = category_dir.join("desc.mdx"); + let has_source = source_path.is_file(); + let has_compiled = compiled_path.is_file(); + + if has_source && !has_compiled { + return Err(crate::CliError::ConfigError(format!( + "Missing compiled prompt for category description \"{}\". source: {} expected compiled: {}", + category_dir + .file_name() + .and_then(|name| name.to_str()) + .unwrap_or_default(), + source_path.to_string_lossy(), + compiled_path.to_string_lossy() + ))); + } + + if !has_source && !has_compiled { + return Ok(None); + } + + let preferred_path = if has_source { + &source_path + } else { + &compiled_path + }; + let content = std::fs::read_to_string(preferred_path).map_err(crate::CliError::IoError)?; + let description = extract_description_from_exports(&content) + .or_else(|| extract_description_from_markdown_body(&content)); + + Ok(Some(CategoryDescriptionFiles { + description, + source_path: if has_source { Some(source_path) } else { None }, + compiled_path: if has_compiled { + Some(compiled_path) + } else { + None + }, + })) +} + const MIME_TYPES: &[(&str, &str)] = &[ (".ts", "text/typescript"), (".tsx", "text/typescript"), @@ -259,7 +379,11 @@ fn scan_child_docs( let Some(file_name) = path.file_name().and_then(|s| s.to_str()) else { continue; }; - if file_name == "skill.mdx" || file_name.ends_with(".src.mdx") || !file_name.ends_with(".mdx") { + if file_name == "skill.mdx" + || file_name == "desc.mdx" + || file_name.ends_with(".src.mdx") + || !file_name.ends_with(".mdx") + { continue; } @@ -371,7 +495,9 @@ fn collect_expected_child_doc_paths( if !file_name.ends_with(".src.mdx") { continue; } - if current_dir == skill_src_dir && file_name == "skill.src.mdx" { + if current_dir == skill_src_dir + && (file_name == "skill.src.mdx" || file_name == "desc.src.mdx") + { continue; } let relative_path = path @@ -501,6 +627,7 @@ fn validate_skill_metadata(metadata: &Value, file_path: &str) -> Result<(), crat } fn create_skill_prompt( + category_name: Option<&str>, name: &str, skill_dir: &Path, global_scope_json: Option<&str>, @@ -570,6 +697,10 @@ fn create_skill_prompt( let length = content.len(); let skill_dir_str = skill_dir.to_string_lossy().into_owned(); + let skill_parent_dir = skill_dir + .parent() + .map(|path| path.to_string_lossy().into_owned()) + .unwrap_or_default(); let yaml_front_matter_typed: Option = serde_json::from_value(final_front_matter.clone()).ok(); @@ -589,7 +720,8 @@ fn create_skill_prompt( content, length, skill_name: name.to_string(), - dir: RelativePath::new(name, &skill_dir_str), + category_name: category_name.map(str::to_string), + dir: RelativePath::new(name, &skill_parent_dir), yaml_front_matter: yaml_front_matter_typed, mcp_config, child_docs: if child_docs.is_empty() { @@ -606,6 +738,89 @@ fn create_skill_prompt( }) } +fn collect_skill_directories(skills_dir: &Path) -> Result, crate::CliError> { + let mut collected = Vec::new(); + let entries = match std::fs::read_dir(skills_dir) { + Ok(entries) => entries, + Err(_) => return Ok(collected), + }; + + for entry in entries.flatten() { + if !entry.file_type().map(|file_type| file_type.is_dir()).unwrap_or(false) { + continue; + } + + let first_level_dir = entry.path(); + let first_level_name = entry.file_name().to_string_lossy().into_owned(); + let has_root_skill = first_level_dir.join("skill.mdx").is_file() + || first_level_dir.join("skill.src.mdx").is_file(); + + let mut nested_skill_dirs = Vec::new(); + let nested_entries = std::fs::read_dir(&first_level_dir).map_err(crate::CliError::IoError)?; + for nested_entry in nested_entries.flatten() { + let nested_path = nested_entry.path(); + if !nested_entry + .file_type() + .map(|file_type| file_type.is_dir()) + .unwrap_or(false) + { + continue; + } + if !nested_path.join("skill.mdx").is_file() && !nested_path.join("skill.src.mdx").is_file() { + continue; + } + nested_skill_dirs.push(nested_entry); + } + + if has_root_skill && !nested_skill_dirs.is_empty() { + return Err(crate::CliError::ConfigError(format!( + "Ambiguous skill layout in {}: directory cannot define both a root skill and nested categorized skills", + first_level_dir.to_string_lossy() + ))); + } + + if has_root_skill { + collected.push(CollectedSkillDir { + category_name: None, + skill_name: first_level_name, + skill_dir: first_level_dir, + }); + continue; + } + + if nested_skill_dirs.is_empty() { + continue; + } + + if let Some(description_files) = read_category_description_files(&first_level_dir)? { + let _ = description_files.description.as_deref(); + let _ = description_files.source_path.as_ref(); + let _ = description_files.compiled_path.as_ref(); + } + + for nested_skill_dir in nested_skill_dirs { + let skill_name = nested_skill_dir.file_name().to_string_lossy().into_owned(); + collected.push(CollectedSkillDir { + category_name: Some(first_level_name.clone()), + skill_name, + skill_dir: nested_skill_dir.path(), + }); + } + } + + collected.sort_by(|left, right| { + left + .category_name + .cmp(&right.category_name) + .then(left.skill_name.cmp(&right.skill_name)) + }); + collected.dedup_by(|left, right| { + left.category_name == right.category_name && left.skill_name == right.skill_name + }); + + Ok(collected) +} + pub fn collect_skill(options_json: &str) -> Result { let options: SkillInputOptions = serde_json::from_str(options_json).map_err(|e| crate::CliError::ConfigError(e.to_string()))?; @@ -617,33 +832,23 @@ pub fn collect_skill(options_json: &str) -> Result { let global_scope_json = options.global_scope.as_ref().map(|v| v.to_string()); let mut skills: Vec = Vec::new(); + let collected_skill_dirs = if skills_dir.is_dir() { + collect_skill_directories(&skills_dir)? + } else { + Vec::new() + }; - let mut skill_names: Vec = Vec::new(); - - if skills_dir.is_dir() - && let Ok(entries) = std::fs::read_dir(&skills_dir) - { - for entry in entries.flatten() { - if entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false) { - skill_names.push(entry.file_name().to_string_lossy().into_owned()); - } - } - } - - if skill_names.is_empty() { + if collected_skill_dirs.is_empty() { return Ok("{\"skills\":[]}".to_string()); } - skill_names.sort(); - skill_names.dedup(); - let mut diagnostics: Vec = Vec::new(); - for skill_name in skill_names { - let skill_dir = skills_dir.join(&skill_name); + for collected_skill_dir in collected_skill_dirs { let prompt = create_skill_prompt( - &skill_name, - &skill_dir, + collected_skill_dir.category_name.as_deref(), + &collected_skill_dir.skill_name, + &collected_skill_dir.skill_dir, global_scope_json.as_deref(), &mut diagnostics, )?; @@ -1053,4 +1258,298 @@ mod tests { .any(|d| d["code"] == "SKILL_NAME_IGNORED") ); } + + #[test] + fn collect_skill_reads_categorized_skill_and_skips_category_desc_files() { + let tmp = TempDir::new().unwrap(); + let category_dir = tmp.path().join("aindex").join("skills").join("tools"); + let skill_dir = category_dir.join("demo"); + fs::create_dir_all(&skill_dir).unwrap(); + + fs::write( + category_dir.join("desc.src.mdx"), + "---\n---\nexport const description = \"Tooling category\"\n\n# Tools", + ) + .unwrap(); + fs::write(category_dir.join("desc.mdx"), "# Tools").unwrap(); + fs::write( + skill_dir.join("skill.src.mdx"), + "---\ndescription: src skill\n---\nSkill source", + ) + .unwrap(); + fs::write( + skill_dir.join("guide.src.mdx"), + "---\ndescription: src guide\n---\nGuide source", + ) + .unwrap(); + fs::write( + skill_dir.join("skill.mdx"), + "---\ndescription: compiled skill\n---\nSkill compiled", + ) + .unwrap(); + fs::write( + skill_dir.join("guide.mdx"), + "---\ndescription: compiled guide\n---\nGuide compiled", + ) + .unwrap(); + + let options = serde_json::json!({ + "workspaceDir": tmp.path().to_string_lossy().to_string(), + }); + + let result = collect_skill(&options.to_string()).unwrap(); + let parsed: serde_json::Value = serde_json::from_str(&result).unwrap(); + let skill = &parsed["skills"][0]; + + assert_eq!(skill["categoryName"], "tools"); + assert_eq!(skill["skillName"], "demo"); + assert_eq!(skill["dir"]["path"], "demo"); + assert!( + skill["dir"]["basePath"] + .as_str() + .unwrap() + .replace('\\', "/") + .ends_with("/aindex/skills/tools") + ); + + let child_paths: Vec = skill["childDocs"] + .as_array() + .unwrap() + .iter() + .map(|doc| doc["relativePath"].as_str().unwrap().to_string()) + .collect(); + assert_eq!(child_paths, vec!["guide.mdx"]); + } + + #[test] + fn collect_skill_reads_all_nested_files_for_categorized_skill() { + let tmp = TempDir::new().unwrap(); + let skill_dir = tmp + .path() + .join("aindex") + .join("skills") + .join("browser") + .join("agent-browser"); + fs::create_dir_all(skill_dir.join("references")).unwrap(); + fs::create_dir_all(skill_dir.join("templates")).unwrap(); + fs::create_dir_all(skill_dir.join("assets")).unwrap(); + + fs::write( + skill_dir.join("skill.src.mdx"), + "---\ndescription: src skill\n---\nBrowser source", + ) + .unwrap(); + fs::write( + skill_dir.join("skill.mdx"), + "---\ndescription: compiled skill\n---\nBrowser compiled", + ) + .unwrap(); + + for name in ["linux-wsl", "authentication"] { + fs::write( + skill_dir.join("references").join(format!("{name}.src.mdx")), + format!("---\ndescription: {name}\n---\n{name} source"), + ) + .unwrap(); + fs::write( + skill_dir.join("references").join(format!("{name}.mdx")), + format!("---\ndescription: {name}\n---\n{name} compiled"), + ) + .unwrap(); + } + + fs::write( + skill_dir.join("templates").join("capture-workflow.sh"), + "#!/usr/bin/env bash\necho capture\n", + ) + .unwrap(); + fs::write( + skill_dir.join("templates").join("authenticated-session.sh"), + "#!/usr/bin/env bash\necho auth\n", + ) + .unwrap(); + fs::write(skill_dir.join("assets").join("logo.png"), [0x89_u8, 0x50, 0x4E, 0x47]).unwrap(); + fs::write( + skill_dir.join("mcp.json"), + r#"{"mcpServers":{"browser":{"command":"agent-browser"}}}"#, + ) + .unwrap(); + + let options = serde_json::json!({ + "workspaceDir": tmp.path().to_string_lossy().to_string(), + }); + + let result = collect_skill(&options.to_string()).unwrap(); + let parsed: serde_json::Value = serde_json::from_str(&result).unwrap(); + let skill = &parsed["skills"][0]; + + assert_eq!(skill["categoryName"], "browser"); + assert_eq!(skill["skillName"], "agent-browser"); + + let child_paths: std::collections::HashSet = skill["childDocs"] + .as_array() + .unwrap() + .iter() + .map(|doc| doc["relativePath"].as_str().unwrap().to_string()) + .collect(); + assert_eq!( + child_paths, + std::collections::HashSet::from([ + "references/linux-wsl.mdx".to_string(), + "references/authentication.mdx".to_string(), + ]) + ); + + let resource_paths: std::collections::HashSet = skill["resources"] + .as_array() + .unwrap() + .iter() + .map(|resource| resource["relativePath"].as_str().unwrap().to_string()) + .collect(); + assert_eq!( + resource_paths, + std::collections::HashSet::from([ + "templates/capture-workflow.sh".to_string(), + "templates/authenticated-session.sh".to_string(), + "assets/logo.png".to_string(), + ]) + ); + + let logo = skill["resources"] + .as_array() + .unwrap() + .iter() + .find(|resource| resource["relativePath"] == "assets/logo.png") + .unwrap(); + assert_eq!(logo["encoding"], "base64"); + assert_eq!(skill["mcpConfig"]["mcpServers"]["browser"]["command"], "agent-browser"); + } + + #[test] + fn collect_skill_supports_legacy_and_categorized_layouts_together() { + let tmp = TempDir::new().unwrap(); + let legacy_dir = tmp.path().join("aindex").join("skills").join("legacy"); + let category_skill_dir = tmp + .path() + .join("aindex") + .join("skills") + .join("tools") + .join("demo"); + fs::create_dir_all(&legacy_dir).unwrap(); + fs::create_dir_all(&category_skill_dir).unwrap(); + + fs::write( + legacy_dir.join("skill.src.mdx"), + "---\ndescription: src legacy\n---\nLegacy source", + ) + .unwrap(); + fs::write( + legacy_dir.join("skill.mdx"), + "---\ndescription: compiled legacy\n---\nLegacy compiled", + ) + .unwrap(); + fs::write( + category_skill_dir.join("skill.src.mdx"), + "---\ndescription: src categorized\n---\nCategorized source", + ) + .unwrap(); + fs::write( + category_skill_dir.join("skill.mdx"), + "---\ndescription: compiled categorized\n---\nCategorized compiled", + ) + .unwrap(); + + let options = serde_json::json!({ + "workspaceDir": tmp.path().to_string_lossy().to_string(), + }); + + let result = collect_skill(&options.to_string()).unwrap(); + let parsed: serde_json::Value = serde_json::from_str(&result).unwrap(); + let skills = parsed["skills"].as_array().unwrap(); + + assert_eq!(skills.len(), 2); + assert_eq!(skills[0]["skillName"], "legacy"); + assert!(skills[0]["categoryName"].is_null()); + assert_eq!(skills[1]["categoryName"], "tools"); + assert_eq!(skills[1]["skillName"], "demo"); + } + + #[test] + fn collect_skill_fails_on_ambiguous_mixed_skill_layout() { + let tmp = TempDir::new().unwrap(); + let ambiguous_dir = tmp.path().join("aindex").join("skills").join("tools"); + let nested_skill_dir = ambiguous_dir.join("demo"); + fs::create_dir_all(&nested_skill_dir).unwrap(); + + fs::write( + ambiguous_dir.join("skill.src.mdx"), + "---\ndescription: src root skill\n---\nRoot source", + ) + .unwrap(); + fs::write( + ambiguous_dir.join("skill.mdx"), + "---\ndescription: compiled root skill\n---\nRoot compiled", + ) + .unwrap(); + fs::write( + nested_skill_dir.join("skill.src.mdx"), + "---\ndescription: src nested skill\n---\nNested source", + ) + .unwrap(); + fs::write( + nested_skill_dir.join("skill.mdx"), + "---\ndescription: compiled nested skill\n---\nNested compiled", + ) + .unwrap(); + + let options = serde_json::json!({ + "workspaceDir": tmp.path().to_string_lossy().to_string(), + }); + + let result = collect_skill(&options.to_string()); + assert!(result.is_err()); + assert!( + result + .unwrap_err() + .to_string() + .contains("Ambiguous skill layout") + ); + } + + #[test] + fn collect_skill_fails_missing_category_desc_dist() { + let tmp = TempDir::new().unwrap(); + let category_dir = tmp.path().join("aindex").join("skills").join("tools"); + let skill_dir = category_dir.join("demo"); + fs::create_dir_all(&skill_dir).unwrap(); + + fs::write( + category_dir.join("desc.src.mdx"), + "export const description = \"Tooling category\"", + ) + .unwrap(); + fs::write( + skill_dir.join("skill.src.mdx"), + "---\ndescription: src categorized\n---\nCategorized source", + ) + .unwrap(); + fs::write( + skill_dir.join("skill.mdx"), + "---\ndescription: compiled categorized\n---\nCategorized compiled", + ) + .unwrap(); + + let options = serde_json::json!({ + "workspaceDir": tmp.path().to_string_lossy().to_string(), + }); + + let result = collect_skill(&options.to_string()); + assert!(result.is_err()); + assert!( + result + .unwrap_err() + .to_string() + .contains("Missing compiled prompt for category description") + ); + } } diff --git a/sdk/src/services/prompt_service.rs b/sdk/src/services/prompt_service.rs index 93afadad..12d69762 100644 --- a/sdk/src/services/prompt_service.rs +++ b/sdk/src/services/prompt_service.rs @@ -187,6 +187,7 @@ struct PromptIdDescriptor { series_name: Option, project_name: Option, relative_name: Option, + skill_category_name: Option, skill_name: Option, } @@ -197,6 +198,7 @@ impl Default for PromptIdDescriptor { series_name: None, project_name: None, relative_name: None, + skill_category_name: None, skill_name: None, } } @@ -241,6 +243,35 @@ fn is_single_segment_identifier(value: &str) -> bool { !normalize_slash_path(value).contains('/') } +fn parse_skill_identifier( + value: &str, + field_name: &str, +) -> Result<(Option, String, String), String> { + let normalized = normalize_relative_identifier(value, field_name)?; + let segments: Vec<&str> = normalized.split('/').collect(); + match segments.as_slice() { + [skill_name] => Ok((None, (*skill_name).to_string(), normalized)), + [category_name, skill_name] => Ok(( + Some((*category_name).to_string()), + (*skill_name).to_string(), + normalized, + )), + _ => Err(format!( + "{} must include one skill name or /", + field_name + )), + } +} + +fn build_skill_identifier(category_name: Option<&str>, skill_name: &str) -> String { + match category_name { + Some(category_name) if !category_name.is_empty() => { + format!("{category_name}/{skill_name}") + } + _ => skill_name.to_string(), + } +} + fn is_aindex_project_series_name(name: &str) -> bool { matches!(name, "app" | "ext" | "arch" | "softwares") } @@ -419,10 +450,7 @@ fn build_skill_definition( env: &ResolvedPromptEnvironment, skill_name: &str, ) -> Result { - let normalized = normalize_relative_identifier(skill_name, "skillName")?; - if !is_single_segment_identifier(&normalized) { - return Err("skillName must be a single path segment".to_string()); - } + let (_, _, normalized) = parse_skill_identifier(skill_name, "skillName")?; let dir_name = DEFAULT_SKILLS_DIR; let source_dir = env.aindex_dir.join(dir_name).join(&normalized); Ok(PromptDefinition { @@ -451,11 +479,8 @@ fn build_skill_child_doc_definition( skill_name: &str, relative_name: &str, ) -> Result { - let normalized_skill = normalize_relative_identifier(skill_name, "skillName")?; + let (_, _, normalized_skill) = parse_skill_identifier(skill_name, "skillName")?; let normalized_relative = normalize_relative_identifier(relative_name, "relativeName")?; - if !is_single_segment_identifier(&normalized_skill) { - return Err("skillName must be a single path segment".to_string()); - } let source_dir = env .aindex_dir .join(DEFAULT_SKILLS_DIR) @@ -556,24 +581,34 @@ fn parse_prompt_id(prompt_id: &str) -> Result { parse_project_prompt_descriptor(ManagedPromptKind::ProjectChildMemory, &normalized_value) } "skill" => { - if !is_single_segment_identifier(&normalized_value) { - return Err("skill promptId must include a single skill name".to_string()); - } + let (skill_category_name, skill_name, _) = + parse_skill_identifier(&normalized_value, "promptId")?; Ok(PromptIdDescriptor { kind: ManagedPromptKind::Skill, - skill_name: Some(normalized_value), + skill_category_name, + skill_name: Some(skill_name), ..Default::default() }) } "skill-child-doc" => { - let parts: Vec<&str> = normalized_value.splitn(2, '/').collect(); - if parts.len() != 2 { + let parts: Vec<&str> = normalized_value.split('/').collect(); + if parts.len() < 2 { return Err("skill-child-doc promptId must include skill and child path".to_string()); } + let (skill_category_name, skill_name, relative_name) = if parts.len() == 2 { + (None, parts[0].to_string(), parts[1].to_string()) + } else { + ( + Some(parts[0].to_string()), + parts[1].to_string(), + parts[2..].join("/"), + ) + }; Ok(PromptIdDescriptor { kind: ManagedPromptKind::SkillChildDoc, - skill_name: Some(parts[0].to_string()), - relative_name: Some(parts[1].to_string()), + skill_category_name, + skill_name: Some(skill_name), + relative_name: Some(relative_name), ..Default::default() }) } @@ -684,22 +719,52 @@ fn collect_flat_prompt_ids( fn collect_skill_prompt_ids(env: &ResolvedPromptEnvironment) -> Vec { let root = env.aindex_dir.join(DEFAULT_SKILLS_DIR); + let mut prompt_ids = Vec::new(); + if !root.is_dir() { + return prompt_ids; + } + let mut skill_names = BTreeSet::new(); - if root.is_dir() { - for e in fs::read_dir(&root).into_iter().flatten().flatten() { - if e.file_type().map(|t| t.is_dir()).unwrap_or(false) { - skill_names.insert(e.file_name().to_string_lossy().to_string()); + for entry in fs::read_dir(&root).into_iter().flatten().flatten() { + if !entry.file_type().map(|file_type| file_type.is_dir()).unwrap_or(false) { + continue; + } + + let first_level_dir = entry.path(); + let first_level_name = entry.file_name().to_string_lossy().to_string(); + let has_root_skill = first_level_dir.join("skill.mdx").is_file() + || first_level_dir.join("skill.src.mdx").is_file(); + + if has_root_skill { + skill_names.insert(first_level_name); + continue; + } + + for nested_entry in fs::read_dir(&first_level_dir).into_iter().flatten().flatten() { + let nested_path = nested_entry.path(); + if !nested_entry + .file_type() + .map(|file_type| file_type.is_dir()) + .unwrap_or(false) + { + continue; } + if !nested_path.join("skill.mdx").is_file() && !nested_path.join("skill.src.mdx").is_file() { + continue; + } + + let nested_name = nested_entry.file_name().to_string_lossy().to_string(); + skill_names.insert(format!("{}/{}", first_level_name, nested_name)); } } - let mut prompt_ids = Vec::new(); + for skill_name in skill_names { prompt_ids.push(format!("skill:{}", skill_name)); let skill_dir = root.join(&skill_name); let mut child_names = BTreeSet::new(); for file in list_files(&skill_dir, &[SOURCE_PROMPT_EXTENSION, MDX_EXTENSION]) { let stripped = strip_prompt_extension(&file); - if stripped == SKILL_ENTRY_FILE_NAME { + if stripped == SKILL_ENTRY_FILE_NAME || stripped == "desc" { continue; } child_names.insert(stripped); @@ -1031,18 +1096,24 @@ fn build_prompt_definition_from_id( ManagedPromptKind::Skill => { let skill_name = descriptor .skill_name + .as_deref() .ok_or("skill promptId must include a skill name")?; - build_skill_definition(env, &skill_name) + let skill_identifier = + build_skill_identifier(descriptor.skill_category_name.as_deref(), skill_name); + build_skill_definition(env, &skill_identifier) } ManagedPromptKind::SkillChildDoc => { let skill_name = descriptor .skill_name + .as_deref() .ok_or("skill-child-doc promptId must include skill and child path")?; + let skill_identifier = + build_skill_identifier(descriptor.skill_category_name.as_deref(), skill_name); let relative_name = descriptor .relative_name .as_deref() .ok_or("skill-child-doc promptId must include skill and child path")?; - build_skill_child_doc_definition(env, &skill_name, relative_name) + build_skill_child_doc_definition(env, &skill_identifier, relative_name) } ManagedPromptKind::Command | ManagedPromptKind::Subagent | ManagedPromptKind::Rule => { let relative_name = descriptor.relative_name.as_deref().ok_or_else(|| { @@ -1142,3 +1213,88 @@ pub fn write_prompt_artifacts(input: &WritePromptArtifactsInput) -> Result ResolvedPromptEnvironment { + ResolvedPromptEnvironment { + _workspace_dir: tmp.path().to_string_lossy().to_string(), + aindex_dir: tmp.path().join("aindex"), + } + } + + #[test] + fn parse_prompt_id_accepts_categorized_skill_ids() { + let skill = parse_prompt_id("skill:tools/demo").unwrap(); + assert_eq!(skill.kind, ManagedPromptKind::Skill); + assert_eq!(skill.skill_category_name.as_deref(), Some("tools")); + assert_eq!(skill.skill_name.as_deref(), Some("demo")); + + let child = parse_prompt_id("skill-child-doc:tools/demo/guides/setup").unwrap(); + assert_eq!(child.kind, ManagedPromptKind::SkillChildDoc); + assert_eq!(child.skill_category_name.as_deref(), Some("tools")); + assert_eq!(child.skill_name.as_deref(), Some("demo")); + assert_eq!(child.relative_name.as_deref(), Some("guides/setup")); + } + + #[test] + fn build_prompt_definition_from_id_supports_categorized_skills() { + let tmp = TempDir::new().unwrap(); + let env = make_env(&tmp); + + let skill = build_prompt_definition_from_id("skill:tools/demo", &env).unwrap(); + assert_eq!(skill.prompt_id, "skill:tools/demo"); + assert!(skill.paths.zh.ends_with("aindex/skills/tools/demo/skill.src.mdx")); + assert!(skill.paths.en.ends_with("aindex/skills/tools/demo/skill.mdx")); + + let child = + build_prompt_definition_from_id("skill-child-doc:tools/demo/guides/setup", &env).unwrap(); + assert_eq!(child.prompt_id, "skill-child-doc:tools/demo/guides/setup"); + assert!( + child + .paths + .zh + .ends_with("aindex/skills/tools/demo/guides/setup.src.mdx") + ); + assert!( + child + .paths + .en + .ends_with("aindex/skills/tools/demo/guides/setup.mdx") + ); + } + + #[test] + fn collect_skill_prompt_ids_discovers_legacy_and_categorized_skills() { + let tmp = TempDir::new().unwrap(); + let env = make_env(&tmp); + let legacy_dir = env.aindex_dir.join("skills").join("legacy"); + let categorized_dir = env.aindex_dir.join("skills").join("tools").join("demo"); + fs::create_dir_all(&legacy_dir).unwrap(); + fs::create_dir_all(categorized_dir.join("guides")).unwrap(); + + fs::write(legacy_dir.join("skill.mdx"), "Legacy").unwrap(); + fs::write(legacy_dir.join("guide.mdx"), "Legacy guide").unwrap(); + fs::write(env.aindex_dir.join("skills").join("tools").join("desc.mdx"), "Tools").unwrap(); + fs::write(categorized_dir.join("skill.mdx"), "Categorized").unwrap(); + fs::write(categorized_dir.join("guides").join("setup.mdx"), "Setup").unwrap(); + + let prompt_ids = collect_skill_prompt_ids(&env); + + assert!(prompt_ids.contains(&"skill:legacy".to_string())); + assert!(prompt_ids.contains(&"skill-child-doc:legacy/guide".to_string())); + assert!(prompt_ids.contains(&"skill:tools/demo".to_string())); + assert!(prompt_ids.contains(&"skill-child-doc:tools/demo/guides/setup".to_string())); + assert!( + !prompt_ids + .iter() + .any(|prompt_id| prompt_id.contains("desc")), + "desc files must not produce prompt ids: {:?}", + prompt_ids + ); + } +} From b9e781045187afeba64f7dbbb6d4cc04aa35e597 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Sat, 2 May 2026 00:23:53 +0800 Subject: [PATCH 35/45] refactor: consolidate resolve_effective_home_dir into shared module to eliminate duplication --- .../output_plans/claude_code_output_plan.rs | 10 +----- .../domain/output_plans/codex_output_plan.rs | 10 +----- .../domain/output_plans/droid_output_plan.rs | 10 +----- .../domain/output_plans/gemini_output_plan.rs | 10 +----- sdk/src/domain/output_plans/mod.rs | 33 +++++++++++++++++++ .../output_plans/opencode_output_plan.rs | 10 +----- sdk/src/domain/output_plans/shared.rs | 12 +++++++ 7 files changed, 50 insertions(+), 45 deletions(-) create mode 100644 sdk/src/domain/output_plans/shared.rs diff --git a/sdk/src/domain/output_plans/claude_code_output_plan.rs b/sdk/src/domain/output_plans/claude_code_output_plan.rs index d2af8337..1342dfd1 100644 --- a/sdk/src/domain/output_plans/claude_code_output_plan.rs +++ b/sdk/src/domain/output_plans/claude_code_output_plan.rs @@ -5,8 +5,8 @@ use serde_json::Value; use crate::CliError; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPluginPlanDto}; use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; -use crate::domain::config; use crate::domain::output_context::OutputContext; +use crate::domain::output_plans::shared::resolve_effective_home_dir; use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; const CLAUDE_CODE_PLUGIN_NAME: &str = "ClaudeCodeCLIOutputAdaptor"; @@ -776,14 +776,6 @@ fn build_cleanup(workspace: &Workspace) -> CleanupDeclarationsDto { } } -fn resolve_effective_home_dir() -> PathBuf { - let runtime_environment = config::resolve_runtime_environment(); - runtime_environment - .effective_home_dir - .or(runtime_environment.native_home_dir) - .unwrap_or_else(|| PathBuf::from("/")) -} - fn get_concrete_projects(workspace: &Workspace) -> impl Iterator { workspace .projects diff --git a/sdk/src/domain/output_plans/codex_output_plan.rs b/sdk/src/domain/output_plans/codex_output_plan.rs index 37de4620..299cd5d1 100644 --- a/sdk/src/domain/output_plans/codex_output_plan.rs +++ b/sdk/src/domain/output_plans/codex_output_plan.rs @@ -20,8 +20,8 @@ use std::path::PathBuf; use crate::CliError; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPluginPlanDto}; use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; -use crate::domain::config; use crate::domain::output_context::OutputContext; +use crate::domain::output_plans::shared::resolve_effective_home_dir; use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; const CODEX_PLUGIN_NAME: &str = "CodexCLIOutputAdaptor"; @@ -541,14 +541,6 @@ fn build_cleanup(workspace: &Workspace) -> CleanupDeclarationsDto { } } -fn resolve_effective_home_dir() -> PathBuf { - let runtime_environment = config::resolve_runtime_environment(); - runtime_environment - .effective_home_dir - .or(runtime_environment.native_home_dir) - .unwrap_or_else(|| PathBuf::from("/")) -} - fn get_concrete_projects(workspace: &Workspace) -> impl Iterator { workspace .projects diff --git a/sdk/src/domain/output_plans/droid_output_plan.rs b/sdk/src/domain/output_plans/droid_output_plan.rs index 3a3c35ab..ab98de14 100644 --- a/sdk/src/domain/output_plans/droid_output_plan.rs +++ b/sdk/src/domain/output_plans/droid_output_plan.rs @@ -6,8 +6,8 @@ use serde_json::{Map, Value}; use crate::CliError; use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; -use crate::domain::config; use crate::domain::output_context::OutputContext; +use crate::domain::output_plans::shared::resolve_effective_home_dir; use crate::domain::plugin_shared::{ Project, RelativePath, RuleScope, SkillPrompt, SlashCommandPrompt, Workspace, }; @@ -287,14 +287,6 @@ fn build_cleanup(workspace: &Workspace) -> CleanupDeclarationsDto { } } -fn resolve_effective_home_dir() -> PathBuf { - let runtime_environment = config::resolve_runtime_environment(); - runtime_environment - .effective_home_dir - .or(runtime_environment.native_home_dir) - .unwrap_or_else(|| PathBuf::from("/")) -} - fn get_concrete_projects(workspace: &Workspace) -> Vec<&Project> { workspace .projects diff --git a/sdk/src/domain/output_plans/gemini_output_plan.rs b/sdk/src/domain/output_plans/gemini_output_plan.rs index ea7b4c69..42ec347b 100644 --- a/sdk/src/domain/output_plans/gemini_output_plan.rs +++ b/sdk/src/domain/output_plans/gemini_output_plan.rs @@ -4,8 +4,8 @@ use std::path::PathBuf; use crate::CliError; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPluginPlanDto}; use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; -use crate::domain::config; use crate::domain::output_context::OutputContext; +use crate::domain::output_plans::shared::resolve_effective_home_dir; use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; const GEMINI_PLUGIN_NAME: &str = "GeminiCLIOutputAdaptor"; @@ -148,14 +148,6 @@ fn build_cleanup(workspace: &Workspace) -> CleanupDeclarationsDto { } } -fn resolve_effective_home_dir() -> PathBuf { - let runtime_environment = config::resolve_runtime_environment(); - runtime_environment - .effective_home_dir - .or(runtime_environment.native_home_dir) - .unwrap_or_else(|| PathBuf::from("/")) -} - fn get_project_output_projects(workspace: &Workspace) -> Vec<&Project> { let mut projects = workspace .projects diff --git a/sdk/src/domain/output_plans/mod.rs b/sdk/src/domain/output_plans/mod.rs index ab712937..37b22868 100644 --- a/sdk/src/domain/output_plans/mod.rs +++ b/sdk/src/domain/output_plans/mod.rs @@ -8,6 +8,39 @@ pub mod jetbrains_ai_assistant_codex_output_plan; pub mod kiro_output_plan; pub mod opencode_output_plan; pub mod qoder_output_plan; +pub mod shared; pub mod trae_output_plan; pub mod warp_output_plan; pub mod windsurf_output_plan; + +#[cfg(test)] +mod regression_tests { + use std::fs; + use std::path::Path; + + #[test] + fn resolve_effective_home_dir_is_not_redefined_in_each_output_plan() { + // 修复 #378:把 5 份重复的 `resolve_effective_home_dir()` 收口到公共 helper, + // 这里用回归测试锁住 output plan 里不再各自定义它。 + let output_plans_dir = Path::new(env!("CARGO_MANIFEST_DIR")).join("src/domain/output_plans"); + let duplicate_definitions = [ + "claude_code_output_plan.rs", + "codex_output_plan.rs", + "gemini_output_plan.rs", + "opencode_output_plan.rs", + "droid_output_plan.rs", + ] + .iter() + .filter(|file_name| { + fs::read_to_string(output_plans_dir.join(file_name)) + .expect("output plan source should be readable") + .contains("fn resolve_effective_home_dir()") + }) + .count(); + + assert_eq!( + duplicate_definitions, 0, + "resolve_effective_home_dir should be defined only once outside the output plan files" + ); + } +} diff --git a/sdk/src/domain/output_plans/opencode_output_plan.rs b/sdk/src/domain/output_plans/opencode_output_plan.rs index eee00b43..431267d4 100644 --- a/sdk/src/domain/output_plans/opencode_output_plan.rs +++ b/sdk/src/domain/output_plans/opencode_output_plan.rs @@ -5,8 +5,8 @@ use serde_json::Value; use crate::CliError; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPluginPlanDto}; use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; -use crate::domain::config; use crate::domain::output_context::OutputContext; +use crate::domain::output_plans::shared::resolve_effective_home_dir; use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; const OPENCODE_PLUGIN_NAME: &str = "OpencodeCLIOutputAdaptor"; @@ -570,14 +570,6 @@ fn is_valid_hex_color(s: &str) -> bool { bytes[1..].iter().all(|&b| b.is_ascii_hexdigit()) } -fn resolve_effective_home_dir() -> PathBuf { - let runtime_environment = config::resolve_runtime_environment(); - runtime_environment - .effective_home_dir - .or(runtime_environment.native_home_dir) - .unwrap_or_else(|| PathBuf::from("/")) -} - fn get_concrete_projects(workspace: &Workspace) -> impl Iterator { workspace .projects diff --git a/sdk/src/domain/output_plans/shared.rs b/sdk/src/domain/output_plans/shared.rs new file mode 100644 index 00000000..de22f488 --- /dev/null +++ b/sdk/src/domain/output_plans/shared.rs @@ -0,0 +1,12 @@ +use std::path::PathBuf; + +use crate::domain::config; + +/// 修复 #378:将各 output plan 重复的有效 home 目录解析统一到这里。 +pub(crate) fn resolve_effective_home_dir() -> PathBuf { + let runtime_environment = config::resolve_runtime_environment(); + runtime_environment + .effective_home_dir + .or(runtime_environment.native_home_dir) + .unwrap_or_else(|| PathBuf::from("/")) +} From b3d9919deda63d6892ff46233d59cb2de4a52aa9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Sat, 2 May 2026 00:36:59 +0800 Subject: [PATCH 36/45] fix: ensure log level flags are mutually exclusive to prevent conflicts --- cli/src/cli.rs | 42 +++++++++++++++++++++++++++++++++++------- 1 file changed, 35 insertions(+), 7 deletions(-) diff --git a/cli/src/cli.rs b/cli/src/cli.rs index 60dc4179..2773f5a7 100644 --- a/cli/src/cli.rs +++ b/cli/src/cli.rs @@ -18,24 +18,45 @@ pub struct Cli { #[command(subcommand)] pub command: Option, + // 修复 #375:这些日志级别 flag 必须互斥,避免同时传入时出现不透明的覆盖行为。 /// Set log level to trace (most verbose) - #[arg(long = "trace", global = true)] + #[arg( + long = "trace", + global = true, + conflicts_with_all = ["debug", "info", "warn", "error"] + )] pub trace: bool, /// Set log level to debug - #[arg(long = "debug", global = true)] + #[arg( + long = "debug", + global = true, + conflicts_with_all = ["trace", "info", "warn", "error"] + )] pub debug: bool, /// Set log level to info - #[arg(long = "info", global = true)] + #[arg( + long = "info", + global = true, + conflicts_with_all = ["trace", "debug", "warn", "error"] + )] pub info: bool, /// Set log level to warn - #[arg(long = "warn", global = true)] + #[arg( + long = "warn", + global = true, + conflicts_with_all = ["trace", "debug", "info", "error"] + )] pub warn: bool, /// Set log level to error - #[arg(long = "error", global = true)] + #[arg( + long = "error", + global = true, + conflicts_with_all = ["trace", "debug", "info", "warn"] + )] pub error: bool, } @@ -81,7 +102,6 @@ pub struct AssembleNpmArgs { } /// Resolved log level from CLI flags. -/// When multiple flags are provided, the most verbose wins. #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum ResolvedLogLevel { Trace, @@ -115,7 +135,6 @@ impl ResolvedLogLevel { } /// Resolve log level from CLI flags. -/// When multiple flags are set, the most verbose (lowest priority number) wins. pub fn resolve_log_level(cli: &Cli) -> Option { let mut levels = Vec::new(); if cli.trace { @@ -176,6 +195,7 @@ pub fn resolve_command(cli: &Cli) -> ResolvedCommand { #[cfg(test)] mod tests { use super::*; + use clap::error::ErrorKind; use clap::Parser; #[test] @@ -195,4 +215,12 @@ mod tests { let cli = Cli::parse_from(["tnmsc", "clean", "--dry-run"]); assert_eq!(resolve_command(&cli), ResolvedCommand::DryRunClean); } + + #[test] + fn log_level_flags_reject_multiple_values() { + // 修复 #375 的回归测试:同时传入多个日志级别 flag 时应当直接报错。 + let result = Cli::try_parse_from(["tnmsc", "--trace", "--debug"]); + let error = result.expect_err("expected clap to reject conflicting log level flags"); + assert_eq!(error.kind(), ErrorKind::ArgumentConflict); + } } From b4c7125afecb776408527d7a2bd3be3631d09ba0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Sun, 3 May 2026 00:22:50 +0800 Subject: [PATCH 37/45] docs: update README and SECURITY.md for improved clarity on asset management and security boundaries --- README.md | 12 ++++-------- SECURITY.md | 4 ++-- 2 files changed, 6 insertions(+), 10 deletions(-) diff --git a/README.md b/README.md index 1b73a540..62312c5b 100644 --- a/README.md +++ b/README.md @@ -7,14 +7,13 @@ A rat carries even its own memories when moving. `memory-sync` is that kind of t ## What It Does - Treat `.mdx` / `.src.mdx` as the single source of truth; generate native configs and managed artifacts for each tool from one source -- Unified input asset model: Memory, Skills, Commands, Sub-agents, Rules, README, etc. +- Unified input asset model: Memory, Skills, Skill categories, Commands, Sub-agents, Rules, README, etc. - Auto-write configs for each tool: AGENTS.md, Claude Code, Codex CLI, Cursor, Windsurf, Qoder, Trae, Warp, JetBrains AI, etc. -- Manage derived artifacts: prompt outputs, skills exports, README-class outputs +- Manage derived artifacts: prompt outputs, skills exports organized as `skills///`, README-class outputs - Multiple entry points: `tnmsc` CLI, private SDK, MCP stdio server, Tauri GUI - Fine-grained write-scope control (`outputScopes`, `cleanupProtection`) - Source and derivations are auditable — no silent source mutations, no hidden residuals - Memories follow the person, not the project — no leakage - ## Install ```sh @@ -30,10 +29,10 @@ npm install -g @truenine/memory-sync-mcp ## Supported Tools | Type | Tools | -|------|-------| +| --- | --- | | IDE / Editor | Cursor, Windsurf, Qoder, Trae, Trae CN, JetBrains AI, Zed, VS Code | | CLI | Claude Code, Codex CLI, Gemini CLI, Droid CLI, Opencode, Warp | -| Other outputs | AGENTS.md, Skills, README, `.editorconfig`, `.git/info/exclude` | +| Other outputs | AGENTS.md, categorized Skills, README, `.editorconfig`, `.git/info/exclude` | ## Architecture @@ -41,7 +40,6 @@ npm install -g @truenine/memory-sync-mcp - **CLI** (`tnmsc` / `@truenine/memory-sync-cli`): public command entry - **MCP** (`tnmsm` / `@truenine/memory-sync-mcp`): stdio server - **GUI** (Tauri): desktop entry - ## FAQ **If AI tools adopt a unified standard, is this project still needed?** Then it has fulfilled its historical mission. @@ -62,14 +60,12 @@ If you're scraping by in a world of profoundly unequal resources — free tiers, - Entitlement seekers who want everything handed to them - Those who glorify overwork as virtue - Malicious competitors stepping on others to climb - **This is not a tool for capital to optimise costs — it's a rat's small act of defiance in a world of resource injustice.** ## Created by - [TrueNine](https://github.com/TrueNine) - [zjarlin](https://github.com/zjarlin) - ## License [AGPL-3.0](LICENSE) \ No newline at end of file diff --git a/SECURITY.md b/SECURITY.md index b82dc51e..ad3653e6 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -24,7 +24,7 @@ Maintainers are people, not a security team — no SLA. We'll confirm as soon as CLI / SDK / MCP / GUI toolchain. Security boundaries: - **Read**: user `.src.mdx` source files, project config, global config (`~/.aindex/.tnmsc.json`), repo metadata required for sync -- **Write**: target tool config directories, managed prompt artifacts (`dist/`), generated outputs +- **Write**: target tool config directories, managed prompt artifacts paired beside their source files, generated outputs - **Cleanup**: erase managed outputs and residuals during sync or cleanup Out of scope: vulnerabilities in target AI tools themselves, user prompt content compliance, hardening third-party dependencies outside this repo. @@ -38,4 +38,4 @@ Out of scope: vulnerabilities in target AI tools themselves, user prompt content ## License -[AGPL-3.0](LICENSE). Commercial use violating the license will be pursued. \ No newline at end of file +[AGPL-3.0](LICENSE). Commercial use violating the license will be pursued. From 5051696e66f50f2a22f15da55f75b7af625b833b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Sun, 3 May 2026 00:23:05 +0800 Subject: [PATCH 38/45] feat: implement clearSavedTimer for managing timeout references and add tests for validation --- gui/src/pages/ConfigPage.tsx | 9 +- gui/src/pages/config-page-timers.test.ts | 29 ++++ gui/src/pages/config-page-timers.ts | 12 ++ mcp/src/main.rs | 195 ++++++++++++++++++++--- 4 files changed, 220 insertions(+), 25 deletions(-) create mode 100644 gui/src/pages/config-page-timers.test.ts create mode 100644 gui/src/pages/config-page-timers.ts diff --git a/gui/src/pages/ConfigPage.tsx b/gui/src/pages/ConfigPage.tsx index b6d5be5c..0bc9e74e 100644 --- a/gui/src/pages/ConfigPage.tsx +++ b/gui/src/pages/ConfigPage.tsx @@ -9,6 +9,7 @@ import JsonEditor from '@/components/JsonEditor' import { useI18n } from '@/i18n' import { cn } from '@/lib/utils' import { validateConfig } from '@/utils/configValidation' +import { clearSavedTimer } from './config-page-timers' type EditorTab = 'form' | 'json' @@ -151,6 +152,11 @@ const ConfigPage: FC = () => { useEffect(() => { loadFile() }, [loadFile]) + useEffect(() => () => { + // Fixes #372: clear the delayed "saved" reset when ConfigPage unmounts. + clearSavedTimer(savedTimerRef) + }, []) + useEffect(() => { try { const parsed: unknown = JSON.parse(content) @@ -161,7 +167,8 @@ const ConfigPage: FC = () => { }, [content]) const handleSave = useCallback(async () => { - if (savedTimerRef.current) clearTimeout(savedTimerRef.current) + // Fixes #372: replace any older timeout before scheduling a fresh saved-state reset. + clearSavedTimer(savedTimerRef) setSaveStatus({ kind: 'saving' }) try { JSON.parse(content) diff --git a/gui/src/pages/config-page-timers.test.ts b/gui/src/pages/config-page-timers.test.ts new file mode 100644 index 00000000..1d477a57 --- /dev/null +++ b/gui/src/pages/config-page-timers.test.ts @@ -0,0 +1,29 @@ +import { describe, expect, it, vi } from 'vitest' + +import { clearSavedTimer } from './config-page-timers' + +describe('clearSavedTimer', () => { + it('clears and resets an active timeout ref', () => { + const clearTimeoutSpy = vi.spyOn(globalThis, 'clearTimeout') + const timeoutHandle = setTimeout(() => {}, 10) + const timerRef = { current: timeoutHandle } + + clearSavedTimer(timerRef) + + expect(clearTimeoutSpy).toHaveBeenCalledWith(timeoutHandle) + expect(timerRef.current).toBeNull() + + clearTimeoutSpy.mockRestore() + }) + + it('ignores empty timeout refs', () => { + const clearTimeoutSpy = vi.spyOn(globalThis, 'clearTimeout') + const timerRef = { current: null } + + clearSavedTimer(timerRef) + + expect(clearTimeoutSpy).not.toHaveBeenCalled() + + clearTimeoutSpy.mockRestore() + }) +}) diff --git a/gui/src/pages/config-page-timers.ts b/gui/src/pages/config-page-timers.ts new file mode 100644 index 00000000..607043d9 --- /dev/null +++ b/gui/src/pages/config-page-timers.ts @@ -0,0 +1,12 @@ +export interface TimeoutRef { + current: ReturnType | null +} + +export function clearSavedTimer(timerRef: TimeoutRef): void { + // Fixes #372: clear the pending save-status timeout during teardown so + // ConfigPage cannot update state after the component has unmounted. + if (timerRef.current) { + clearTimeout(timerRef.current) + timerRef.current = null + } +} diff --git a/mcp/src/main.rs b/mcp/src/main.rs index 10d5bd4f..b94d5a34 100644 --- a/mcp/src/main.rs +++ b/mcp/src/main.rs @@ -47,6 +47,17 @@ fn error_result(message: &str) -> Value { }) } +fn json_rpc_error_response(id: Value, code: i64, message: &str) -> Value { + json!({ + "jsonrpc": "2.0", + "id": id, + "error": { + "code": code, + "message": message + } + }) +} + fn handle_initialize() -> Value { json!({ "capabilities": { @@ -163,15 +174,41 @@ fn handle_tools_call(params: &Value) -> Value { } } +fn parse_object_params(request: &Value, method: &str) -> Result { + // Fixes #376: JSON-RPC object params must be validated before dispatch so + // array params return the standard -32602 Invalid params error. + match request.get("params") { + None | Some(Value::Null) => Ok(json!({})), + Some(Value::Object(_)) => Ok(request.get("params").cloned().unwrap_or(json!({}))), + Some(_) => Err(format!("Invalid params for {method}: expected object")), + } +} + +fn build_tools_call_response(id: Value, request: &Value) -> Value { + match parse_object_params(request, "tools/call") { + Ok(params) => json!({ + "jsonrpc": "2.0", + "id": id, + "result": handle_tools_call(¶ms) + }), + Err(message) => json_rpc_error_response(id, -32602, &message), + } +} + fn handle_list_prompts(args: &Value) -> Value { let base = build_service_options(args); - let kinds: Option> = args - .get("kinds") - .and_then(|v| serde_json::from_value(v.clone()).ok()); + // Fixes #384: invalid enum filters must surface as MCP errors instead of + // silently degrading to an unfiltered query. + let kinds: Option> = match parse_optional_kinds_arg(args, "kinds") { + Ok(value) => value, + Err(error) => return error_result(&error), + }; let query = args.get("query").and_then(|v| v.as_str()).map(String::from); - let en_status: Option> = args - .get("enStatus") - .and_then(|v| serde_json::from_value(v.clone()).ok()); + let en_status: Option> = + match parse_optional_prompt_state_arg(args, "enStatus") { + Ok(value) => value, + Err(error) => return error_result(&error), + }; let options = ListPromptsOptions { base, @@ -186,6 +223,33 @@ fn handle_list_prompts(args: &Value) -> Value { } } +fn parse_optional_kinds_arg( + args: &Value, + key: &str, +) -> Result>, String> { + // Fixes #384: keep enum-filter validation explicit even after the caller + // has delegated parsing into a helper. + match args.get(key) { + Some(value) if !value.is_null() => serde_json::from_value(value.clone()) + .map(Some) + .map_err(|error| format!("Invalid '{key}': {error}")), + _ => Ok(None), + } +} + +fn parse_optional_prompt_state_arg( + args: &Value, + key: &str, +) -> Result>, String> { + // Fixes #384: prompt artifact states should fail closed on invalid values. + match args.get(key) { + Some(value) if !value.is_null() => serde_json::from_value(value.clone()) + .map(Some) + .map_err(|error| format!("Invalid '{key}': {error}")), + _ => Ok(None), + } +} + fn handle_get_prompt(args: &Value) -> Value { let options = build_service_options(args); let prompt_id = match args.get("promptId").and_then(|v| v.as_str()) { @@ -298,29 +362,25 @@ fn run_stdio_server() { "id": id, "result": handle_tools_list() }), - "tools/call" => { - let params = request.get("params").cloned().unwrap_or(json!({})); - json!({ - "jsonrpc": "2.0", - "id": id, - "result": handle_tools_call(¶ms) - }) - } - _ => json!({ - "jsonrpc": "2.0", - "id": id, - "error": { - "code": -32601, - "message": format!("Method not found: {}", method) - } - }), + "tools/call" => build_tools_call_response(id, &request), + _ => json_rpc_error_response(id, -32601, &format!("Method not found: {}", method)), }; - let _ = writeln!(writer, "{}", response); - let _ = writer.flush(); + // Fixes #383: once the client closes stdout, stop the loop instead of + // continuing to process requests that can never be delivered. + if write_json_response(&mut writer, &response).is_err() { + break; + } } } +fn write_json_response(writer: &mut impl Write, response: &Value) -> std::io::Result<()> { + // Fixes #383: funnel response writes through one fallible path so BrokenPipe + // reaches the stdio loop and terminates the server cleanly. + writeln!(writer, "{}", response)?; + writer.flush() +} + fn main() -> ExitCode { // Initialize logger, default Info, override via LOG_LEVEL env var tnmsd::infra::logger::set_global_level( @@ -352,3 +412,90 @@ fn main() -> ExitCode { } } } + +#[cfg(test)] +mod tests { + use super::*; + + struct BrokenPipeWriter; + + impl Write for BrokenPipeWriter { + fn write(&mut self, _buf: &[u8]) -> std::io::Result { + Err(std::io::Error::from(std::io::ErrorKind::BrokenPipe)) + } + + fn flush(&mut self) -> std::io::Result<()> { + Ok(()) + } + } + + #[test] + fn list_prompts_rejects_invalid_kinds_filter() { + let result = handle_list_prompts(&json!({ + "kinds": ["projct-memory"] + })); + + assert_eq!(result.get("isError").and_then(Value::as_bool), Some(true)); + assert!( + result + .get("content") + .and_then(Value::as_array) + .and_then(|items| items.first()) + .and_then(|item| item.get("text")) + .and_then(Value::as_str) + .is_some_and(|text| text.contains("kinds")), + "invalid kinds filter should surface an MCP error" + ); + } + + #[test] + fn list_prompts_rejects_invalid_en_status_filter() { + let result = handle_list_prompts(&json!({ + "enStatus": ["unkown"] + })); + + assert_eq!(result.get("isError").and_then(Value::as_bool), Some(true)); + assert!( + result + .get("content") + .and_then(Value::as_array) + .and_then(|items| items.first()) + .and_then(|item| item.get("text")) + .and_then(Value::as_str) + .is_some_and(|text| text.contains("enStatus")), + "invalid enStatus filter should surface an MCP error" + ); + } + + #[test] + fn write_json_response_propagates_broken_pipe_errors() { + let mut writer = BrokenPipeWriter; + let result = write_json_response(&mut writer, &json!({"ok": true})); + + assert!( + result.is_err(), + "broken pipe writes must be visible to the stdio server loop" + ); + } + + #[test] + fn tools_call_rejects_array_params_with_json_rpc_invalid_params() { + let response = build_tools_call_response( + json!(7), + &json!({ + "jsonrpc": "2.0", + "id": 7, + "method": "tools/call", + "params": [] + }), + ); + + assert_eq!(response["error"]["code"], json!(-32602)); + assert!( + response["error"]["message"] + .as_str() + .is_some_and(|message| message.contains("expected object")), + "unexpected invalid params error: {response}" + ); + } +} From 37e4538303715559effe8a7f3daf2a30df0ab2d6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Sun, 3 May 2026 00:23:24 +0800 Subject: [PATCH 39/45] feat: enhance package assembly process to report copy errors and handle UTF-8 workspace paths --- cli/src/bin/tnmsc-test-api.rs | 56 +++++++++- cli/src/cli.rs | 2 +- cli/src/commands/package.rs | 194 ++++++++++++++++++++++++++++------ 3 files changed, 213 insertions(+), 39 deletions(-) diff --git a/cli/src/bin/tnmsc-test-api.rs b/cli/src/bin/tnmsc-test-api.rs index 1fded802..c28b8b35 100644 --- a/cli/src/bin/tnmsc-test-api.rs +++ b/cli/src/bin/tnmsc-test-api.rs @@ -47,6 +47,22 @@ fn print_result(result: Result) -> ExitCode { } } +fn build_collect_aindex_resolvers_input( + workspace_dir: &std::path::Path, +) -> Result { + let workspace_dir = workspace_dir.to_str().ok_or_else(|| { + // Fixes #382: test/debug tooling must reject non-UTF-8 workspace paths + // explicitly instead of silently corrupting them with to_string_lossy(). + tnmsd::CliError::ConfigError( + "CollectAindexResolvers requires --workspace-dir to be valid UTF-8".to_string(), + ) + })?; + + Ok(serde_json::json!({ + "workspaceDir": workspace_dir, + })) +} + fn main() -> ExitCode { let cli = Cli::parse(); @@ -61,12 +77,42 @@ fn main() -> ExitCode { .map_err(tnmsd::CliError::ExecutionError), ), Command::CollectAindexResolvers(args) => { - let input = serde_json::json!({ - "workspaceDir": args.workspace_dir.to_string_lossy(), + let result = build_collect_aindex_resolvers_input(&args.workspace_dir).and_then(|input| { + tnmsd::repositories::aindex_resolvers::collect_aindex_resolvers(&input.to_string()) }); - print_result( - tnmsd::repositories::aindex_resolvers::collect_aindex_resolvers(&input.to_string()), - ) + print_result(result) } } } + +#[cfg(test)] +mod tests { + use super::build_collect_aindex_resolvers_input; + use std::path::Path; + + #[test] + fn collect_aindex_resolvers_input_preserves_utf8_workspace_dir() { + let input = build_collect_aindex_resolvers_input(Path::new("/tmp/demo")).unwrap(); + + assert_eq!(input["workspaceDir"], serde_json::json!("/tmp/demo")); + } + + #[cfg(unix)] + #[test] + fn collect_aindex_resolvers_input_rejects_non_utf8_workspace_dir() { + use std::ffi::OsString; + use std::os::unix::ffi::OsStringExt; + use std::path::PathBuf; + + let invalid_path = PathBuf::from(OsString::from_vec(vec![0x66, 0x6f, 0x80, 0x6f])); + let result = build_collect_aindex_resolvers_input(&invalid_path); + + assert!( + result + .as_ref() + .err() + .is_some_and(|error| error.to_string().contains("valid UTF-8")), + "unexpected result: {result:?}" + ); + } +} diff --git a/cli/src/cli.rs b/cli/src/cli.rs index 2773f5a7..22794c9c 100644 --- a/cli/src/cli.rs +++ b/cli/src/cli.rs @@ -195,8 +195,8 @@ pub fn resolve_command(cli: &Cli) -> ResolvedCommand { #[cfg(test)] mod tests { use super::*; - use clap::error::ErrorKind; use clap::Parser; + use clap::error::ErrorKind; #[test] fn resolve_command_defaults_to_install() { diff --git a/cli/src/commands/package.rs b/cli/src/commands/package.rs index 36fab300..1ec28d67 100644 --- a/cli/src/commands/package.rs +++ b/cli/src/commands/package.rs @@ -10,6 +10,16 @@ struct PackageTarget { binary_name: &'static str, } +struct PackageAssemblyReport { + copied: Vec, + skipped: Vec, +} + +enum LocalBuildCopyAttempt { + Copied(PathBuf), + MissingBinary(String), +} + const PACKAGE_TARGETS: &[PackageTarget] = &[ PackageTarget { suffix: "linux-x64-gnu", @@ -40,10 +50,15 @@ const PACKAGE_TARGETS: &[PackageTarget] = &[ pub fn execute(args: &AssembleNpmArgs) -> ExitCode { match assemble_packages(args) { - Ok(copied) => { - for path in copied { + Ok(report) => { + for path in report.copied { println!("Hydrated {}", path.display()); } + // Fixes #381: best-effort assembly still needs to explain skipped targets, + // otherwise partial output looks like a complete success. + for skipped in report.skipped { + eprintln!("Skipped {skipped}"); + } ExitCode::SUCCESS } Err(error) => { @@ -53,51 +68,46 @@ pub fn execute(args: &AssembleNpmArgs) -> ExitCode { } } -fn assemble_packages(args: &AssembleNpmArgs) -> Result, String> { +fn assemble_packages(args: &AssembleNpmArgs) -> Result { if let Some(artifacts_dir) = args.artifacts_dir.as_deref() { return PACKAGE_TARGETS .iter() .map(|target| copy_target_from_artifacts(target, artifacts_dir)) - .collect(); + .collect::, _>>() + .map(|copied| PackageAssemblyReport { + copied, + skipped: Vec::new(), + }); } - // 尝试复制所有目标,优先使用交叉编译产物,回退到本地主机构建 + // Fixes #381: missing targets stay best-effort, but real copy errors and skips + // are now surfaced instead of being silently discarded. let mut copied = Vec::new(); + let mut skipped = Vec::new(); for target in PACKAGE_TARGETS { - if let Ok(path) = copy_target_from_local_build(target, &args.profile) { - copied.push(path); + match try_copy_target_from_local_build(target, &args.profile)? { + LocalBuildCopyAttempt::Copied(path) => copied.push(path), + LocalBuildCopyAttempt::MissingBinary(reason) => skipped.push(reason), } } if copied.is_empty() { let host_target = detect_host_target()?; - copy_target_from_local_build(host_target, &args.profile).map(|path| vec![path]) + copy_target_from_local_build(host_target, &args.profile).map(|path| PackageAssemblyReport { + copied: vec![path], + skipped: Vec::new(), + }) } else { - Ok(copied) + Ok(PackageAssemblyReport { copied, skipped }) } } -fn copy_target_from_artifacts( +fn try_copy_target_from_local_build( target: &PackageTarget, - artifacts_dir: &Path, -) -> Result { - let source = artifacts_dir - .join(format!("cli-binary-{}", target.suffix)) - .join(target.binary_name); - - if !source.is_file() { - return Err(format!( - "Missing artifact binary for {} at {}", - target.suffix, - source.display() - )); - } - - copy_into_package(target, &source) -} - -fn copy_target_from_local_build(target: &PackageTarget, profile: &str) -> Result { - // 首先尝试从交叉编译目标目录查找 + profile: &str, +) -> Result { + // Fixes #381: distinguish "target was never built" from "copy failed" so the + // caller can keep best-effort behavior without swallowing real I/O errors. let target_triple = target_to_triple(target.suffix); let cross_source = workspace_root() .join("target") @@ -106,29 +116,56 @@ fn copy_target_from_local_build(target: &PackageTarget, profile: &str) -> Result .join(target.binary_name); if cross_source.is_file() { - return copy_into_package(target, &cross_source); + return copy_into_package(target, &cross_source).map(LocalBuildCopyAttempt::Copied); } - // 回退到本地主机构建目录 let source = workspace_root() .join("target") .join(profile) .join(target.binary_name); if !source.is_file() { - return Err(format!( - "Missing binary for {}. Tried:\n - {}\n - {}\n\nRun cargo build --{} --target {} -p tnmsc first.", + return Ok(LocalBuildCopyAttempt::MissingBinary(format!( + "{}: missing binary. Tried:\n - {}\n - {}\n Run cargo build --{} --target {} -p tnmsc first.", target.suffix, cross_source.display(), source.display(), profile, target_triple + ))); + } + + copy_into_package(target, &source).map(LocalBuildCopyAttempt::Copied) +} + +fn copy_target_from_artifacts( + target: &PackageTarget, + artifacts_dir: &Path, +) -> Result { + let source = artifacts_dir + .join(format!("cli-binary-{}", target.suffix)) + .join(target.binary_name); + + if !source.is_file() { + return Err(format!( + "Missing artifact binary for {} at {}", + target.suffix, + source.display() )); } copy_into_package(target, &source) } +fn copy_target_from_local_build(target: &PackageTarget, profile: &str) -> Result { + // Fixes #381: the host-target fallback still needs the old fail-fast contract, + // so convert the richer attempt result back into a plain error here. + match try_copy_target_from_local_build(target, profile)? { + LocalBuildCopyAttempt::Copied(path) => Ok(path), + LocalBuildCopyAttempt::MissingBinary(reason) => Err(reason), + } +} + fn target_to_triple(suffix: &str) -> &str { match suffix { "linux-x64-gnu" => "x86_64-unknown-linux-gnu", @@ -222,3 +259,94 @@ fn set_executable_permissions(path: &Path) -> Result<(), String> { fn set_executable_permissions(_path: &Path) -> Result<(), String> { Ok(()) } + +#[cfg(test)] +mod tests { + use std::sync::{Mutex, OnceLock}; + use std::time::{SystemTime, UNIX_EPOCH}; + + use super::*; + + fn test_env_lock() -> &'static Mutex<()> { + static LOCK: OnceLock> = OnceLock::new(); + LOCK.get_or_init(|| Mutex::new(())) + } + + fn unique_temp_dir(label: &str) -> PathBuf { + let nanos = SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("system time should be after unix epoch") + .as_nanos(); + let path = std::env::temp_dir().join(format!( + "tnmsc-package-tests-{label}-{}-{nanos}", + std::process::id() + )); + fs::create_dir_all(&path).expect("temp dir should be created"); + path + } + + #[test] + fn assemble_packages_reports_copy_errors_instead_of_silently_skipping_targets() { + let _guard = test_env_lock() + .lock() + .expect("test env lock should not poison"); + let package_root = unique_temp_dir("package-root"); + let workspace_root = unique_temp_dir("workspace-root"); + + let release_dir = workspace_root.join("target"); + let linux_x64_dir = release_dir.join("x86_64-unknown-linux-gnu").join("release"); + let linux_arm64_dir = release_dir + .join("aarch64-unknown-linux-gnu") + .join("release"); + fs::create_dir_all(&linux_x64_dir).expect("x64 target dir should exist"); + fs::create_dir_all(&linux_arm64_dir).expect("arm64 target dir should exist"); + fs::write(linux_x64_dir.join("tnmsc"), "x64").expect("x64 binary should exist"); + fs::write(linux_arm64_dir.join("tnmsc"), "arm64").expect("arm64 binary should exist"); + + let broken_bin_path = package_root.join("npm").join("linux-arm64-gnu").join("bin"); + fs::create_dir_all( + broken_bin_path + .parent() + .expect("broken bin parent should be present"), + ) + .expect("broken bin parent dir should exist"); + fs::write(&broken_bin_path, "not-a-directory").expect("broken bin marker should exist"); + + let previous_package_root = std::env::var_os("TNMSC_NPM_PACKAGE_ROOT"); + let previous_workspace_root = std::env::var_os("TNMSC_WORKSPACE_ROOT"); + unsafe { + std::env::set_var("TNMSC_NPM_PACKAGE_ROOT", &package_root); + std::env::set_var("TNMSC_WORKSPACE_ROOT", &workspace_root); + } + + let result = assemble_packages(&AssembleNpmArgs { + artifacts_dir: None, + profile: "release".to_string(), + }); + + match previous_package_root { + Some(value) => unsafe { + std::env::set_var("TNMSC_NPM_PACKAGE_ROOT", value); + }, + None => unsafe { + std::env::remove_var("TNMSC_NPM_PACKAGE_ROOT"); + }, + } + match previous_workspace_root { + Some(value) => unsafe { + std::env::set_var("TNMSC_WORKSPACE_ROOT", value); + }, + None => unsafe { + std::env::remove_var("TNMSC_WORKSPACE_ROOT"); + }, + } + + assert!( + result.is_err(), + "copy errors for discovered local targets must not be silently skipped" + ); + + let _ = fs::remove_dir_all(package_root); + let _ = fs::remove_dir_all(workspace_root); + } +} From 951e0c499549561c70eab6c0598a945acee8d215 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Sun, 3 May 2026 00:45:00 +0800 Subject: [PATCH 40/45] style: format assertions and binary metadata checks for improved readability --- cli/local-tests/src/lib.rs | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/cli/local-tests/src/lib.rs b/cli/local-tests/src/lib.rs index da63e3c7..92ba8ba1 100644 --- a/cli/local-tests/src/lib.rs +++ b/cli/local-tests/src/lib.rs @@ -674,11 +674,16 @@ mod tests { let root_modified = newest_modified_time(&root.join("sdk")).unwrap(); let late_modified = fs::metadata(&late).unwrap().modified().unwrap(); - assert_eq!(system_time_key(root_modified), system_time_key(late_modified)); + assert_eq!( + system_time_key(root_modified), + system_time_key(late_modified) + ); } fn is_binary_stale_for_paths(binary: &Path, inputs: &[PathBuf]) -> bool { - let Some(binary_modified) = fs::metadata(binary).ok().and_then(|meta| meta.modified().ok()) + let Some(binary_modified) = fs::metadata(binary) + .ok() + .and_then(|meta| meta.modified().ok()) else { return true; }; From 8ac7a21bafccbb34e2ec0e0a77264256eaf9100b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Sun, 3 May 2026 00:53:40 +0800 Subject: [PATCH 41/45] feat: enhance output plans to support child memory prompts and global memory - Added support for emitting nested .cursorrules files for child memory prompts in cursor_output_plan. - Updated droid_output_plan to use global memory when AgentsOutputAdaptor is active and emit nested AGENTS.md files for child prompts. - Enhanced gemini_output_plan to emit global-only project files when AgentsOutputAdaptor is registered and maintain nested child memory files. - Modified opencode_output_plan to collapse project memory to global-only payload while AgentsOutputAdaptor is active and emit nested AGENTS.md files for child prompts. - Updated trae_output_plan to emit nested steering files for child memory prompts. - Enhanced windsurf_output_plan to emit nested .windsurfrules files for child memory prompts. - Introduced regression tests to ensure child memory prompts are emitted across all target plans and that global memory is used when AgentsOutputAdaptor is active. - Improved Deno runtime to restrict script execution to allowed script roots and prevent leaking absolute paths. - Refactored prompt_service to safely handle relative paths and prevent leaking absolute paths through the prompt catalog. --- .../output_plans/claude_code_output_plan.rs | 259 +++++++++++------- .../domain/output_plans/codex_output_plan.rs | 245 ++++++++++++----- .../domain/output_plans/cursor_output_plan.rs | 14 + .../domain/output_plans/droid_output_plan.rs | 68 +++-- .../domain/output_plans/gemini_output_plan.rs | 68 +++-- sdk/src/domain/output_plans/mod.rs | 230 ++++++++++++++++ .../output_plans/opencode_output_plan.rs | 244 +++++++++++------ .../domain/output_plans/trae_output_plan.rs | 18 ++ .../output_plans/windsurf_output_plan.rs | 14 + sdk/src/infra/deno_runtime.rs | 148 +++++++++- sdk/src/repositories/skill.rs | 26 +- sdk/src/services/prompt_service.rs | 55 +++- 12 files changed, 1083 insertions(+), 306 deletions(-) diff --git a/sdk/src/domain/output_plans/claude_code_output_plan.rs b/sdk/src/domain/output_plans/claude_code_output_plan.rs index 1342dfd1..dace7c70 100644 --- a/sdk/src/domain/output_plans/claude_code_output_plan.rs +++ b/sdk/src/domain/output_plans/claude_code_output_plan.rs @@ -14,6 +14,7 @@ const CLAUDE_CODE_MEMORY_FILE: &str = "CLAUDE.md"; const CLAUDE_CODE_SETTINGS_FILE: &str = "settings.json"; const CLAUDE_CODE_SETTINGS_LOCAL_FILE: &str = "settings.local.json"; const CLAUDE_CODE_GLOBAL_CONFIG_DIR: &str = ".claude"; +const AGENTS_OUTPUT_ADAPTOR: &str = "AgentsOutputAdaptor"; const PROJECT_SCOPE: &str = "project"; pub fn collect_claude_code_output_plan(context_json: &str) -> Result { @@ -44,51 +45,77 @@ fn build_output_files( ) -> Vec { let mut output_files = Vec::new(); let prompt_projects = get_project_prompt_output_projects(workspace); - - // 项目级 CLAUDE.md(根目录 + 子目录) - // 工作区根 CLAUDE.md 需要同时携带全局 memory 和工作区 prompt, - // 这样打包 CLI 在裸容器里安装后也能直接看到完整的 Claude 上下文。 - for project in &prompt_projects { - let Some(project_root_dir) = resolve_project_root_dir(workspace, project) else { - continue; - }; - - if let Some(root_prompt) = project.root_memory_prompt.as_ref() { - let content = if project.is_workspace_root_project == Some(true) { - merge_workspace_root_memory( - context - .global_memory - .as_ref() - .map(|prompt| prompt.content.as_str()), - &root_prompt.content, - ) - } else { - root_prompt.content.clone() + let agents_registered = context + .registered_output_plugins + .as_ref() + .map(|plugins| plugins.iter().any(|name| name == AGENTS_OUTPUT_ADAPTOR)) + .unwrap_or(false); + + if agents_registered { + // Fixes #379: Claude's project files should switch to the global-only memory + // payload while AgentsOutputAdaptor is registered. + if let Some(global_memory) = context.global_memory.as_ref() { + for project in &prompt_projects { + let Some(project_root_dir) = resolve_project_root_dir(workspace, project) else { + continue; + }; + output_files.push(BaseOutputFileDeclarationDto { + path: project_root_dir + .join(CLAUDE_CODE_MEMORY_FILE) + .to_string_lossy() + .into_owned(), + scope: Some(PROJECT_SCOPE.to_string()), + content: global_memory.content.clone(), + encoding: None, + }); + } + } + } else { + // 项目级 CLAUDE.md(根目录 + 子目录) + // 工作区根 CLAUDE.md 需要同时携带全局 memory 和工作区 prompt, + // 这样打包 CLI 在裸容器里安装后也能直接看到完整的 Claude 上下文。 + for project in &prompt_projects { + let Some(project_root_dir) = resolve_project_root_dir(workspace, project) else { + continue; }; - output_files.push(BaseOutputFileDeclarationDto { - path: project_root_dir - .join(CLAUDE_CODE_MEMORY_FILE) - .to_string_lossy() - .into_owned(), - scope: Some(PROJECT_SCOPE.to_string()), - content, - encoding: None, - }); - } + if let Some(root_prompt) = project.root_memory_prompt.as_ref() { + let content = if project.is_workspace_root_project == Some(true) { + merge_workspace_root_memory( + context + .global_memory + .as_ref() + .map(|prompt| prompt.content.as_str()), + &root_prompt.content, + ) + } else { + root_prompt.content.clone() + }; - if let Some(child_prompts) = project.child_memory_prompts.as_ref() { - for child_prompt in child_prompts { output_files.push(BaseOutputFileDeclarationDto { - path: resolve_relative_path(&child_prompt.dir) + path: project_root_dir .join(CLAUDE_CODE_MEMORY_FILE) .to_string_lossy() .into_owned(), scope: Some(PROJECT_SCOPE.to_string()), - content: child_prompt.content.clone(), + content, encoding: None, }); } + + if let Some(child_prompts) = project.child_memory_prompts.as_ref() { + for child_prompt in child_prompts { + output_files.push(BaseOutputFileDeclarationDto { + path: resolve_relative_path(&child_prompt.dir) + .join(CLAUDE_CODE_MEMORY_FILE) + .to_string_lossy() + .into_owned(), + scope: Some(PROJECT_SCOPE.to_string()), + content: child_prompt.content.clone(), + encoding: None, + }); + } + } } } @@ -367,7 +394,9 @@ fn append_skill_supporting_files( for child_doc in child_docs { output_files.push(BaseOutputFileDeclarationDto { path: skill_sub_dir - .join(resolve_child_doc_output_relative_path(&child_doc.relative_path)) + .join(resolve_child_doc_output_relative_path( + &child_doc.relative_path, + )) .to_string_lossy() .into_owned(), scope: Some(PROJECT_SCOPE.to_string()), @@ -387,9 +416,7 @@ fn append_skill_supporting_files( scope: Some(PROJECT_SCOPE.to_string()), content: resource.content.clone(), encoding: match resource.encoding { - crate::domain::plugin_shared::SkillResourceEncoding::Base64 => { - Some("base64".to_string()) - } + crate::domain::plugin_shared::SkillResourceEncoding::Base64 => Some("base64".to_string()), crate::domain::plugin_shared::SkillResourceEncoding::Text => None, }, }); @@ -398,7 +425,10 @@ fn append_skill_supporting_files( if let Some(mcp_config) = skill.mcp_config.as_ref() { output_files.push(BaseOutputFileDeclarationDto { - path: skill_sub_dir.join("mcp.json").to_string_lossy().into_owned(), + path: skill_sub_dir + .join("mcp.json") + .to_string_lossy() + .into_owned(), scope: Some(PROJECT_SCOPE.to_string()), content: mcp_config.raw_content.clone(), encoding: None, @@ -528,61 +558,68 @@ mod tests { description: Some("desc".to_string()), ..SkillYAMLFrontMatter::default() }), - child_docs: Some(vec![SkillChildDoc { - prompt_type: PromptKind::SkillChildDoc, - content: "guide".to_string(), - length: 5, - file_path_kind: crate::infra::path_types::FilePathKind::Relative, - relative_path: "guide.mdx".to_string(), - dir: crate::infra::path_types::RelativePath::new( - "guide.mdx", - "/workspace/aindex/skills/test", - ), - raw_front_matter: None, - markdown_ast: None, - markdown_contents: None, - }, SkillChildDoc { - prompt_type: PromptKind::SkillChildDoc, - content: "linux-wsl".to_string(), - length: 9, - file_path_kind: crate::infra::path_types::FilePathKind::Relative, - relative_path: "references/linux-wsl.mdx".to_string(), - dir: crate::infra::path_types::RelativePath::new( - "references/linux-wsl.mdx", - "/workspace/aindex/skills/test", - ), - raw_front_matter: None, - markdown_ast: None, - markdown_contents: None, - }]), - resources: Some(vec![SkillResource { - prompt_type: PromptKind::SkillResource, - extension: "txt".to_string(), - file_name: "notes.txt".to_string(), - relative_path: "assets/notes.txt".to_string(), - content: "notes".to_string(), - encoding: SkillResourceEncoding::Text, - length: 5, - mime_type: None, - }, SkillResource { - prompt_type: PromptKind::SkillResource, - extension: "sh".to_string(), - file_name: "capture-workflow.sh".to_string(), - relative_path: "templates/capture-workflow.sh".to_string(), - content: "#!/usr/bin/env bash\necho capture\n".to_string(), - encoding: SkillResourceEncoding::Text, - length: 32, - mime_type: None, - }, SkillResource { - prompt_type: PromptKind::SkillResource, - extension: "bin".to_string(), - file_name: "blob.bin".to_string(), - relative_path: "assets/blob.bin".to_string(), - content: "AAEC".to_string(), - encoding: SkillResourceEncoding::Base64, - length: 3, - mime_type: Some("application/octet-stream".to_string()), - }]), + child_docs: Some(vec![ + SkillChildDoc { + prompt_type: PromptKind::SkillChildDoc, + content: "guide".to_string(), + length: 5, + file_path_kind: crate::infra::path_types::FilePathKind::Relative, + relative_path: "guide.mdx".to_string(), + dir: crate::infra::path_types::RelativePath::new( + "guide.mdx", + "/workspace/aindex/skills/test", + ), + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, + }, + SkillChildDoc { + prompt_type: PromptKind::SkillChildDoc, + content: "linux-wsl".to_string(), + length: 9, + file_path_kind: crate::infra::path_types::FilePathKind::Relative, + relative_path: "references/linux-wsl.mdx".to_string(), + dir: crate::infra::path_types::RelativePath::new( + "references/linux-wsl.mdx", + "/workspace/aindex/skills/test", + ), + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, + }, + ]), + resources: Some(vec![ + SkillResource { + prompt_type: PromptKind::SkillResource, + extension: "txt".to_string(), + file_name: "notes.txt".to_string(), + relative_path: "assets/notes.txt".to_string(), + content: "notes".to_string(), + encoding: SkillResourceEncoding::Text, + length: 5, + mime_type: None, + }, + SkillResource { + prompt_type: PromptKind::SkillResource, + extension: "sh".to_string(), + file_name: "capture-workflow.sh".to_string(), + relative_path: "templates/capture-workflow.sh".to_string(), + content: "#!/usr/bin/env bash\necho capture\n".to_string(), + encoding: SkillResourceEncoding::Text, + length: 32, + mime_type: None, + }, + SkillResource { + prompt_type: PromptKind::SkillResource, + extension: "bin".to_string(), + file_name: "blob.bin".to_string(), + relative_path: "assets/blob.bin".to_string(), + content: "AAEC".to_string(), + encoding: SkillResourceEncoding::Base64, + length: 3, + mime_type: Some("application/octet-stream".to_string()), + }, + ]), mcp_config: Some(SkillMcpConfig { prompt_type: PromptKind::SkillMcpConfig, mcp_servers: std::collections::HashMap::new(), @@ -637,10 +674,26 @@ mod tests { ); assert!(skill_paths.iter().any(|path| path.ends_with("SKILL.md"))); assert!(skill_paths.iter().any(|path| path.ends_with("guide.md"))); - assert!(skill_paths.iter().any(|path| path.ends_with("references/linux-wsl.md"))); - assert!(skill_paths.iter().any(|path| path.ends_with("assets/notes.txt"))); - assert!(skill_paths.iter().any(|path| path.ends_with("templates/capture-workflow.sh"))); - assert!(skill_paths.iter().any(|path| path.ends_with("assets/blob.bin"))); + assert!( + skill_paths + .iter() + .any(|path| path.ends_with("references/linux-wsl.md")) + ); + assert!( + skill_paths + .iter() + .any(|path| path.ends_with("assets/notes.txt")) + ); + assert!( + skill_paths + .iter() + .any(|path| path.ends_with("templates/capture-workflow.sh")) + ); + assert!( + skill_paths + .iter() + .any(|path| path.ends_with("assets/blob.bin")) + ); assert!(skill_paths.iter().any(|path| path.ends_with("mcp.json"))); let binary_resource = plan @@ -692,9 +745,15 @@ mod tests { }) .unwrap(); - assert!(skill_file.content.contains("name: dev-tools-reverse-engineering")); assert!( - skill_file.content.contains("skill: aindex/skills/dev-tools/reverse-engineering") + skill_file + .content + .contains("name: dev-tools-reverse-engineering") + ); + assert!( + skill_file + .content + .contains("skill: aindex/skills/dev-tools/reverse-engineering") ); } } diff --git a/sdk/src/domain/output_plans/codex_output_plan.rs b/sdk/src/domain/output_plans/codex_output_plan.rs index 299cd5d1..c454341d 100644 --- a/sdk/src/domain/output_plans/codex_output_plan.rs +++ b/sdk/src/domain/output_plans/codex_output_plan.rs @@ -30,6 +30,7 @@ const CODEX_GLOBAL_CONFIG_DIR: &str = ".codex"; const CODEX_PROMPTS_DIR: &str = "prompts"; const CODEX_AGENTS_DIR: &str = "agents"; const CODEX_SKILLS_DIR: &str = "skills"; +const AGENTS_OUTPUT_ADAPTOR: &str = "AgentsOutputAdaptor"; const PROJECT_SCOPE: &str = "project"; fn resolve_skill_dir_name(skill: &crate::domain::plugin_shared::SkillPrompt) -> String { @@ -79,7 +80,13 @@ fn build_output_files( context: &OutputContext, ) -> Vec { let mut output_files = Vec::new(); + let prompt_projects = get_project_prompt_output_projects(workspace); let project_output_projects = get_project_output_projects(workspace); + let agents_registered = context + .registered_output_plugins + .as_ref() + .map(|plugins| plugins.iter().any(|name| name == AGENTS_OUTPUT_ADAPTOR)) + .unwrap_or(false); // Global ~/.codex/AGENTS.md (use raw content to match aindex/global.mdx) if let Some(global_memory) = context.global_memory.as_ref() { @@ -100,6 +107,61 @@ fn build_output_files( }); } + if agents_registered { + // Fixes #379: Codex project AGENTS.md files should switch to the + // dedicated global-memory payload when AgentsOutputAdaptor is active. + if let Some(global_memory) = context.global_memory.as_ref() { + for project in &prompt_projects { + let Some(project_root_dir) = resolve_project_root_dir(workspace, project) else { + continue; + }; + output_files.push(BaseOutputFileDeclarationDto { + path: project_root_dir + .join(CODEX_INSTRUCTIONS_FILE) + .to_string_lossy() + .into_owned(), + scope: Some(PROJECT_SCOPE.to_string()), + content: global_memory.content.clone(), + encoding: None, + }); + } + } + } else { + let global_memory_content = context.global_memory.as_ref().map(|m| m.content.as_str()); + for project in &prompt_projects { + let Some(project_root_dir) = resolve_project_root_dir(workspace, project) else { + continue; + }; + + if let Some(root_prompt) = project.root_memory_prompt.as_ref() { + output_files.push(BaseOutputFileDeclarationDto { + path: project_root_dir + .join(CODEX_INSTRUCTIONS_FILE) + .to_string_lossy() + .into_owned(), + scope: Some(PROJECT_SCOPE.to_string()), + content: combine_global_with_content(global_memory_content, &root_prompt.content), + encoding: None, + }); + } + + if let Some(child_prompts) = project.child_memory_prompts.as_ref() { + // Fixes #380: Codex must emit nested AGENTS.md files for child memory prompts. + for child_prompt in child_prompts { + output_files.push(BaseOutputFileDeclarationDto { + path: resolve_relative_path(&child_prompt.dir) + .join(CODEX_INSTRUCTIONS_FILE) + .to_string_lossy() + .into_owned(), + scope: Some(PROJECT_SCOPE.to_string()), + content: child_prompt.content.clone(), + encoding: None, + }); + } + } + } + } + // Global ~/.codex/prompts/ (from commands) if let Some(commands) = context.slash_commands.as_ref() { let codex_prompts_dir = resolve_effective_home_dir() @@ -299,7 +361,9 @@ fn append_skill_supporting_files( for child_doc in child_docs { output_files.push(BaseOutputFileDeclarationDto { path: skill_sub_dir - .join(resolve_child_doc_output_relative_path(&child_doc.relative_path)) + .join(resolve_child_doc_output_relative_path( + &child_doc.relative_path, + )) .to_string_lossy() .into_owned(), scope: Some(PROJECT_SCOPE.to_string()), @@ -319,9 +383,7 @@ fn append_skill_supporting_files( scope: Some(PROJECT_SCOPE.to_string()), content: resource.content.clone(), encoding: match resource.encoding { - crate::domain::plugin_shared::SkillResourceEncoding::Base64 => { - Some("base64".to_string()) - } + crate::domain::plugin_shared::SkillResourceEncoding::Base64 => Some("base64".to_string()), crate::domain::plugin_shared::SkillResourceEncoding::Text => None, }, }); @@ -330,7 +392,10 @@ fn append_skill_supporting_files( if let Some(mcp_config) = skill.mcp_config.as_ref() { output_files.push(BaseOutputFileDeclarationDto { - path: skill_sub_dir.join("mcp.json").to_string_lossy().into_owned(), + path: skill_sub_dir + .join("mcp.json") + .to_string_lossy() + .into_owned(), scope: Some(PROJECT_SCOPE.to_string()), content: mcp_config.raw_content.clone(), encoding: None, @@ -471,6 +536,15 @@ fn camel_to_kebab(s: &str) -> String { result } +fn combine_global_with_content(global_content: Option<&str>, project_content: &str) -> String { + match global_content { + Some(global) if !global.trim().is_empty() => { + format!("{}\n\n{}", global.trim(), project_content.trim()) + } + _ => project_content.to_string(), + } +} + fn build_cleanup(workspace: &Workspace) -> CleanupDeclarationsDto { let mut delete = Vec::new(); @@ -508,6 +582,18 @@ fn build_cleanup(workspace: &Workspace) -> CleanupDeclarationsDto { continue; }; + delete.push(CleanupTargetDto { + path: project_root_dir + .join(CODEX_INSTRUCTIONS_FILE) + .to_string_lossy() + .into_owned(), + kind: CleanupTargetKindDto::File, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: Some(PROJECT_SCOPE.to_string()), + label: Some("delete.project".to_string()), + }); + let codex_dir = project_root_dir.join(CODEX_GLOBAL_CONFIG_DIR); delete.push(CleanupTargetDto { @@ -560,6 +646,13 @@ fn get_project_output_projects(workspace: &Workspace) -> Vec<&Project> { projects } +fn get_project_prompt_output_projects(workspace: &Workspace) -> Vec<&Project> { + get_project_output_projects(workspace) + .into_iter() + .filter(|project| project.is_prompt_source_project != Some(true)) + .collect() +} + fn resolve_project_root_dir(workspace: &Workspace, project: &Project) -> Option { if project.is_workspace_root_project == Some(true) { return Some(PathBuf::from(&workspace.directory.path)); @@ -599,58 +692,62 @@ mod tests { description: Some("desc".to_string()), ..SkillYAMLFrontMatter::default() }), - child_docs: Some(vec![SkillChildDoc { - prompt_type: PromptKind::SkillChildDoc, - content: "guide".to_string(), - length: 5, - file_path_kind: FilePathKind::Relative, - relative_path: "guide.mdx".to_string(), - dir: RelativePath::new("guide.mdx", "/workspace/aindex/skills/test"), - raw_front_matter: None, - markdown_ast: None, - markdown_contents: None, - }, SkillChildDoc { - prompt_type: PromptKind::SkillChildDoc, - content: "linux-wsl".to_string(), - length: 9, - file_path_kind: FilePathKind::Relative, - relative_path: "references/linux-wsl.mdx".to_string(), - dir: RelativePath::new( - "references/linux-wsl.mdx", - "/workspace/aindex/skills/test", - ), - raw_front_matter: None, - markdown_ast: None, - markdown_contents: None, - }]), - resources: Some(vec![SkillResource { - prompt_type: PromptKind::SkillResource, - extension: "txt".to_string(), - file_name: "notes.txt".to_string(), - relative_path: "assets/notes.txt".to_string(), - content: "notes".to_string(), - encoding: SkillResourceEncoding::Text, - length: 5, - mime_type: None, - }, SkillResource { - prompt_type: PromptKind::SkillResource, - extension: "sh".to_string(), - file_name: "capture-workflow.sh".to_string(), - relative_path: "templates/capture-workflow.sh".to_string(), - content: "#!/usr/bin/env bash\necho capture\n".to_string(), - encoding: SkillResourceEncoding::Text, - length: 32, - mime_type: None, - }, SkillResource { - prompt_type: PromptKind::SkillResource, - extension: "bin".to_string(), - file_name: "blob.bin".to_string(), - relative_path: "assets/blob.bin".to_string(), - content: "AAEC".to_string(), - encoding: SkillResourceEncoding::Base64, - length: 3, - mime_type: Some("application/octet-stream".to_string()), - }]), + child_docs: Some(vec![ + SkillChildDoc { + prompt_type: PromptKind::SkillChildDoc, + content: "guide".to_string(), + length: 5, + file_path_kind: FilePathKind::Relative, + relative_path: "guide.mdx".to_string(), + dir: RelativePath::new("guide.mdx", "/workspace/aindex/skills/test"), + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, + }, + SkillChildDoc { + prompt_type: PromptKind::SkillChildDoc, + content: "linux-wsl".to_string(), + length: 9, + file_path_kind: FilePathKind::Relative, + relative_path: "references/linux-wsl.mdx".to_string(), + dir: RelativePath::new("references/linux-wsl.mdx", "/workspace/aindex/skills/test"), + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, + }, + ]), + resources: Some(vec![ + SkillResource { + prompt_type: PromptKind::SkillResource, + extension: "txt".to_string(), + file_name: "notes.txt".to_string(), + relative_path: "assets/notes.txt".to_string(), + content: "notes".to_string(), + encoding: SkillResourceEncoding::Text, + length: 5, + mime_type: None, + }, + SkillResource { + prompt_type: PromptKind::SkillResource, + extension: "sh".to_string(), + file_name: "capture-workflow.sh".to_string(), + relative_path: "templates/capture-workflow.sh".to_string(), + content: "#!/usr/bin/env bash\necho capture\n".to_string(), + encoding: SkillResourceEncoding::Text, + length: 32, + mime_type: None, + }, + SkillResource { + prompt_type: PromptKind::SkillResource, + extension: "bin".to_string(), + file_name: "blob.bin".to_string(), + relative_path: "assets/blob.bin".to_string(), + content: "AAEC".to_string(), + encoding: SkillResourceEncoding::Base64, + length: 3, + mime_type: Some("application/octet-stream".to_string()), + }, + ]), mcp_config: Some(SkillMcpConfig { prompt_type: PromptKind::SkillMcpConfig, mcp_servers: std::collections::HashMap::new(), @@ -703,10 +800,26 @@ mod tests { ); assert!(skill_paths.iter().any(|path| path.ends_with("SKILL.md"))); assert!(skill_paths.iter().any(|path| path.ends_with("guide.md"))); - assert!(skill_paths.iter().any(|path| path.ends_with("references/linux-wsl.md"))); - assert!(skill_paths.iter().any(|path| path.ends_with("assets/notes.txt"))); - assert!(skill_paths.iter().any(|path| path.ends_with("templates/capture-workflow.sh"))); - assert!(skill_paths.iter().any(|path| path.ends_with("assets/blob.bin"))); + assert!( + skill_paths + .iter() + .any(|path| path.ends_with("references/linux-wsl.md")) + ); + assert!( + skill_paths + .iter() + .any(|path| path.ends_with("assets/notes.txt")) + ); + assert!( + skill_paths + .iter() + .any(|path| path.ends_with("templates/capture-workflow.sh")) + ); + assert!( + skill_paths + .iter() + .any(|path| path.ends_with("assets/blob.bin")) + ); assert!(skill_paths.iter().any(|path| path.ends_with("mcp.json"))); let binary_resource = plan @@ -756,9 +869,15 @@ mod tests { }) .unwrap(); - assert!(skill_file.content.contains("name: dev-tools-reverse-engineering")); assert!( - skill_file.content.contains("skill: aindex/skills/dev-tools/reverse-engineering") + skill_file + .content + .contains("name: dev-tools-reverse-engineering") + ); + assert!( + skill_file + .content + .contains("skill: aindex/skills/dev-tools/reverse-engineering") ); } } diff --git a/sdk/src/domain/output_plans/cursor_output_plan.rs b/sdk/src/domain/output_plans/cursor_output_plan.rs index 23e0c270..2a560a02 100644 --- a/sdk/src/domain/output_plans/cursor_output_plan.rs +++ b/sdk/src/domain/output_plans/cursor_output_plan.rs @@ -80,6 +80,20 @@ fn build_output_files( encoding: None, }); } + if let Some(child_prompts) = project.child_memory_prompts.as_ref() { + // Fixes #380: Cursor needs nested .cursorrules files for child memory prompts too. + for child_prompt in child_prompts { + output_files.push(BaseOutputFileDeclarationDto { + path: resolve_relative_path(&child_prompt.dir) + .join(CURSOR_MEMORY_FILE) + .to_string_lossy() + .into_owned(), + scope: Some(PROJECT_SCOPE.to_string()), + content: child_prompt.content.clone(), + encoding: None, + }); + } + } } } output_files diff --git a/sdk/src/domain/output_plans/droid_output_plan.rs b/sdk/src/domain/output_plans/droid_output_plan.rs index ab98de14..f732913e 100644 --- a/sdk/src/domain/output_plans/droid_output_plan.rs +++ b/sdk/src/domain/output_plans/droid_output_plan.rs @@ -17,6 +17,7 @@ const DROID_MEMORY_FILE: &str = "AGENTS.md"; const DROID_GLOBAL_CONFIG_DIR: &str = ".factory"; const DROID_COMMANDS_SUBDIR: &str = "commands"; const DROID_SKILLS_SUBDIR: &str = "skills"; +const AGENTS_OUTPUT_ADAPTOR: &str = "AgentsOutputAdaptor"; const PROJECT_SCOPE: &str = "project"; const GLOBAL_SCOPE: &str = "global"; @@ -72,28 +73,51 @@ fn build_output_files( context: &OutputContext, ) -> Result, CliError> { let mut output_files = Vec::new(); - - for project in get_project_prompt_output_projects(workspace) { - let Some(project_root_dir) = resolve_project_root_dir(workspace, project) else { - continue; - }; - - if let Some(root_prompt) = project.root_memory_prompt.as_ref() { - output_files.push(create_text_output_file( - project_root_dir.join(DROID_MEMORY_FILE), - Some(PROJECT_SCOPE), - root_prompt.content.clone(), - )); + let agents_registered = context + .registered_output_plugins + .as_ref() + .map(|plugins| plugins.iter().any(|name| name == AGENTS_OUTPUT_ADAPTOR)) + .unwrap_or(false); + + if agents_registered { + // Fixes #379: Droid project AGENTS files should switch to the + // dedicated global-only payload while AgentsOutputAdaptor is active. + if let Some(global_memory) = context.global_memory.as_ref() { + for project in get_project_prompt_output_projects(workspace) { + let Some(project_root_dir) = resolve_project_root_dir(workspace, project) else { + continue; + }; + output_files.push(create_text_output_file( + project_root_dir.join(DROID_MEMORY_FILE), + Some(PROJECT_SCOPE), + global_memory.content.clone(), + )); + } } + } else { + for project in get_project_prompt_output_projects(workspace) { + let Some(project_root_dir) = resolve_project_root_dir(workspace, project) else { + continue; + }; - if let Some(child_prompts) = project.child_memory_prompts.as_ref() { - for child_prompt in child_prompts { + if let Some(root_prompt) = project.root_memory_prompt.as_ref() { output_files.push(create_text_output_file( - resolve_relative_path(&child_prompt.dir).join(DROID_MEMORY_FILE), + project_root_dir.join(DROID_MEMORY_FILE), Some(PROJECT_SCOPE), - child_prompt.content.clone(), + root_prompt.content.clone(), )); } + + if let Some(child_prompts) = project.child_memory_prompts.as_ref() { + for child_prompt in child_prompts { + output_files.push(create_text_output_file( + // Fixes #380: Droid must emit nested AGENTS.md files for child prompts. + resolve_relative_path(&child_prompt.dir).join(DROID_MEMORY_FILE), + Some(PROJECT_SCOPE), + child_prompt.content.clone(), + )); + } + } } } @@ -1231,9 +1255,17 @@ mod tests { let skill_file = plan .output_files .iter() - .find(|file| file.path.contains(".factory/skills/tools-test-skill/SKILL.md")) + .find(|file| { + file + .path + .contains(".factory/skills/tools-test-skill/SKILL.md") + }) .unwrap(); - assert!(skill_file.content.starts_with("---\nname: tools-test-skill\n")); + assert!( + skill_file + .content + .starts_with("---\nname: tools-test-skill\n") + ); } } diff --git a/sdk/src/domain/output_plans/gemini_output_plan.rs b/sdk/src/domain/output_plans/gemini_output_plan.rs index 42ec347b..f5c53973 100644 --- a/sdk/src/domain/output_plans/gemini_output_plan.rs +++ b/sdk/src/domain/output_plans/gemini_output_plan.rs @@ -11,6 +11,7 @@ use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; const GEMINI_PLUGIN_NAME: &str = "GeminiCLIOutputAdaptor"; const GEMINI_MEMORY_FILE: &str = "GEMINI.md"; const GEMINI_GLOBAL_CONFIG_DIR: &str = ".gemini"; +const AGENTS_OUTPUT_ADAPTOR: &str = "AgentsOutputAdaptor"; pub fn collect_gemini_output_plan(context_json: &str) -> Result { let context = serde_json::from_str::(context_json)?; @@ -39,36 +40,63 @@ fn build_output_files( context: &OutputContext, ) -> Vec { let mut output_files = Vec::new(); - - for project in get_project_prompt_output_projects(workspace) { - let Some(project_root_dir) = resolve_project_root_dir(workspace, project) else { - continue; - }; - - if let Some(root_prompt) = project.root_memory_prompt.as_ref() { - output_files.push(BaseOutputFileDeclarationDto { - path: project_root_dir - .join(GEMINI_MEMORY_FILE) - .to_string_lossy() - .into_owned(), - scope: Some("project".to_string()), - content: root_prompt.content.clone(), - encoding: None, - }); + let agents_registered = context + .registered_output_plugins + .as_ref() + .map(|plugins| plugins.iter().any(|name| name == AGENTS_OUTPUT_ADAPTOR)) + .unwrap_or(false); + + if agents_registered { + // Fixes #379: Gemini should mirror the existing agent-aware plans and emit + // global-only project files while AgentsOutputAdaptor is active. + if let Some(global_memory) = context.global_memory.as_ref() { + for project in get_project_prompt_output_projects(workspace) { + let Some(project_root_dir) = resolve_project_root_dir(workspace, project) else { + continue; + }; + output_files.push(BaseOutputFileDeclarationDto { + path: project_root_dir + .join(GEMINI_MEMORY_FILE) + .to_string_lossy() + .into_owned(), + scope: Some("project".to_string()), + content: global_memory.content.clone(), + encoding: None, + }); + } } + } else { + for project in get_project_prompt_output_projects(workspace) { + let Some(project_root_dir) = resolve_project_root_dir(workspace, project) else { + continue; + }; - if let Some(child_prompts) = project.child_memory_prompts.as_ref() { - for child_prompt in child_prompts { + if let Some(root_prompt) = project.root_memory_prompt.as_ref() { output_files.push(BaseOutputFileDeclarationDto { - path: resolve_relative_path(&child_prompt.dir) + path: project_root_dir .join(GEMINI_MEMORY_FILE) .to_string_lossy() .into_owned(), scope: Some("project".to_string()), - content: child_prompt.content.clone(), + content: root_prompt.content.clone(), encoding: None, }); } + + if let Some(child_prompts) = project.child_memory_prompts.as_ref() { + for child_prompt in child_prompts { + output_files.push(BaseOutputFileDeclarationDto { + // Fixes #380: Gemini must keep nested child memory files in non-agent mode. + path: resolve_relative_path(&child_prompt.dir) + .join(GEMINI_MEMORY_FILE) + .to_string_lossy() + .into_owned(), + scope: Some("project".to_string()), + content: child_prompt.content.clone(), + encoding: None, + }); + } + } } } diff --git a/sdk/src/domain/output_plans/mod.rs b/sdk/src/domain/output_plans/mod.rs index 37b22868..e37474c8 100644 --- a/sdk/src/domain/output_plans/mod.rs +++ b/sdk/src/domain/output_plans/mod.rs @@ -18,6 +18,12 @@ mod regression_tests { use std::fs; use std::path::Path; + use crate::domain::output_context::OutputContext; + use crate::domain::plugin_shared::{ + FilePathKind, GlobalMemoryPrompt, Project, ProjectChildrenMemoryPrompt, + ProjectRootMemoryPrompt, PromptKind, RelativePath, RootPath, Workspace, + }; + #[test] fn resolve_effective_home_dir_is_not_redefined_in_each_output_plan() { // 修复 #378:把 5 份重复的 `resolve_effective_home_dir()` 收口到公共 helper, @@ -43,4 +49,228 @@ mod regression_tests { "resolve_effective_home_dir should be defined only once outside the output plan files" ); } + + fn create_root_prompt(content: &str) -> ProjectRootMemoryPrompt { + ProjectRootMemoryPrompt { + prompt_type: PromptKind::ProjectRootMemory, + content: content.to_string(), + length: content.len(), + file_path_kind: FilePathKind::Root, + dir: RootPath::new(""), + yaml_front_matter: None, + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, + } + } + + fn create_child_prompt( + project_root: &str, + relative_dir: &str, + content: &str, + ) -> ProjectChildrenMemoryPrompt { + let relative_path = RelativePath::new(relative_dir, project_root); + ProjectChildrenMemoryPrompt { + prompt_type: PromptKind::ProjectChildrenMemory, + content: content.to_string(), + length: content.len(), + file_path_kind: FilePathKind::Relative, + dir: relative_path.clone(), + yaml_front_matter: None, + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, + working_child_directory_path: relative_path, + } + } + + fn create_global_memory(content: &str) -> GlobalMemoryPrompt { + GlobalMemoryPrompt { + prompt_type: PromptKind::GlobalMemory, + content: content.to_string(), + length: content.len(), + file_path_kind: FilePathKind::Relative, + dir: RelativePath::new(".global", "/home/test"), + raw_front_matter: None, + markdown_contents: None, + parent_directory_path: None, + raw_content: None, + } + } + + fn create_project(workspace_root: &str, name: &str) -> Project { + Project { + name: Some(name.to_string()), + dir_from_workspace_path: Some(RelativePath::new(name, workspace_root)), + ..Project::default() + } + } + + fn sample_context_with_child_prompts(workspace_root: &str) -> OutputContext { + let project_root = format!("{workspace_root}/project-a"); + OutputContext { + workspace: Some(Workspace { + directory: RootPath::new(workspace_root), + projects: vec![ + Project { + name: Some("__workspace__".to_string()), + is_workspace_root_project: Some(true), + root_memory_prompt: Some(create_root_prompt("workspace root")), + ..Project::default() + }, + Project { + is_prompt_source_project: Some(true), + root_memory_prompt: Some(create_root_prompt("prompt source root")), + ..create_project(workspace_root, "aindex") + }, + Project { + root_memory_prompt: Some(create_root_prompt("project root")), + child_memory_prompts: Some(vec![create_child_prompt( + &project_root, + "packages/api", + "child memory", + )]), + ..create_project(workspace_root, "project-a") + }, + ], + }), + global_memory: Some(create_global_memory("global memory")), + ..OutputContext::default() + } + } + + #[test] + fn regression_380_child_memory_prompts_are_emitted_for_all_target_plans() { + // Fixes #380: every listed output plan must emit child memory files instead of + // silently dropping project.child_memory_prompts. + let workspace_root = "/workspace"; + let child_dir = format!("{workspace_root}/project-a/packages/api"); + let context = sample_context_with_child_prompts(workspace_root); + + let cursor_plan = + crate::domain::output_plans::cursor_output_plan::build_cursor_output_plan(&context).unwrap(); + assert!( + cursor_plan.output_files.iter().any( + |file| file.path == format!("{child_dir}/.cursorrules") && file.content == "child memory" + ), + "cursor output plan must emit child .cursorrules files" + ); + + let windsurf_plan = + crate::domain::output_plans::windsurf_output_plan::build_windsurf_output_plan(&context) + .unwrap(); + assert!( + windsurf_plan + .output_files + .iter() + .any(|file| file.path == format!("{child_dir}/.windsurfrules") + && file.content == "child memory"), + "windsurf output plan must emit child .windsurfrules files" + ); + + let trae_plan = + crate::domain::output_plans::trae_output_plan::build_trae_output_plan(&context).unwrap(); + assert!( + trae_plan.output_files.iter().any(|file| { + file.path == format!("{child_dir}/.trae/steering/GLOBAL.md") + && file.content == "child memory" + }), + "trae output plan must emit child steering files" + ); + + let opencode_plan = + crate::domain::output_plans::opencode_output_plan::build_opencode_output_plan(&context) + .unwrap(); + assert!( + opencode_plan.output_files.iter().any(|file| { + file.path == format!("{child_dir}/.opencode/AGENTS.md") && file.content == "child memory" + }), + "opencode output plan must emit child AGENTS files" + ); + + let codex_plan = + crate::domain::output_plans::codex_output_plan::build_codex_output_plan(&context).unwrap(); + assert!( + codex_plan + .output_files + .iter() + .any(|file| file.path == format!("{child_dir}/AGENTS.md") && file.content == "child memory"), + "codex output plan must emit child AGENTS files" + ); + } + + #[test] + fn regression_379_agents_output_mode_uses_global_memory_for_project_files() { + // Fixes #379: when AgentsOutputAdaptor is registered, project output files should switch + // to the global-only memory mode that the cursor/warp/windsurf/trae plans already use. + let workspace_root = "/workspace"; + let project_root = format!("{workspace_root}/project-a"); + let context = OutputContext { + registered_output_plugins: Some(vec!["AgentsOutputAdaptor".to_string()]), + ..sample_context_with_child_prompts(workspace_root) + }; + + let claude_plan = + crate::domain::output_plans::claude_code_output_plan::build_claude_code_output_plan(&context) + .unwrap(); + assert!( + claude_plan + .output_files + .iter() + .any(|file| file.path == format!("{project_root}/CLAUDE.md") + && file.content == "global memory"), + "claude output plan must emit global-only CLAUDE.md when AgentsOutputAdaptor is active" + ); + assert!( + !claude_plan + .output_files + .iter() + .any(|file| file.path.contains("/packages/api/CLAUDE.md")), + "claude output plan must omit child project memory files in agents mode" + ); + + let gemini_plan = + crate::domain::output_plans::gemini_output_plan::build_gemini_output_plan(&context).unwrap(); + assert!( + gemini_plan + .output_files + .iter() + .any(|file| file.path == format!("{project_root}/GEMINI.md") + && file.content == "global memory"), + "gemini output plan must emit global-only GEMINI.md when AgentsOutputAdaptor is active" + ); + + let droid_plan = + crate::domain::output_plans::droid_output_plan::build_droid_output_plan(&context).unwrap(); + assert!( + droid_plan + .output_files + .iter() + .any(|file| file.path == format!("{project_root}/AGENTS.md") + && file.content == "global memory"), + "droid output plan must emit global-only AGENTS.md when AgentsOutputAdaptor is active" + ); + + let opencode_plan = + crate::domain::output_plans::opencode_output_plan::build_opencode_output_plan(&context) + .unwrap(); + assert!( + opencode_plan.output_files.iter().any(|file| { + file.path == format!("{project_root}/.opencode/AGENTS.md") + && file.content == "global memory" + }), + "opencode output plan must emit global-only project memory when AgentsOutputAdaptor is active" + ); + + let codex_plan = + crate::domain::output_plans::codex_output_plan::build_codex_output_plan(&context).unwrap(); + assert!( + codex_plan + .output_files + .iter() + .any(|file| file.path == format!("{project_root}/AGENTS.md") + && file.content == "global memory"), + "codex output plan must emit global-only AGENTS.md when AgentsOutputAdaptor is active" + ); + } } diff --git a/sdk/src/domain/output_plans/opencode_output_plan.rs b/sdk/src/domain/output_plans/opencode_output_plan.rs index 431267d4..665cb240 100644 --- a/sdk/src/domain/output_plans/opencode_output_plan.rs +++ b/sdk/src/domain/output_plans/opencode_output_plan.rs @@ -13,6 +13,7 @@ const OPENCODE_PLUGIN_NAME: &str = "OpencodeCLIOutputAdaptor"; const OPENCODE_MEMORY_FILE: &str = "AGENTS.md"; const OPENCODE_PROJECT_CONFIG_DIR: &str = ".opencode"; const OPENCODE_GLOBAL_CONFIG_DIR: &str = ".config/opencode"; +const AGENTS_OUTPUT_ADAPTOR: &str = "AgentsOutputAdaptor"; const PROJECT_SCOPE: &str = "project"; fn resolve_skill_dir_name(skill: &crate::domain::plugin_shared::SkillPrompt) -> String { @@ -63,27 +64,70 @@ fn build_output_files( ) -> Vec { let mut output_files = Vec::new(); let prompt_projects = get_project_prompt_output_projects(workspace); + let agents_registered = context + .registered_output_plugins + .as_ref() + .map(|plugins| plugins.iter().any(|name| name == AGENTS_OUTPUT_ADAPTOR)) + .unwrap_or(false); + + if agents_registered { + // Fixes #379: Opencode project memory should collapse to the global-only payload + // while AgentsOutputAdaptor is registered. + if let Some(global_memory) = context.global_memory.as_ref() { + for project in &prompt_projects { + let Some(project_root_dir) = resolve_project_root_dir(workspace, project) else { + continue; + }; + output_files.push(BaseOutputFileDeclarationDto { + path: project_root_dir + .join(OPENCODE_PROJECT_CONFIG_DIR) + .join(OPENCODE_MEMORY_FILE) + .to_string_lossy() + .into_owned(), + scope: Some(PROJECT_SCOPE.to_string()), + content: global_memory.content.clone(), + encoding: None, + }); + } + } + } else { + let global_memory_content = context.global_memory.as_ref().map(|m| m.content.as_str()); - let global_memory_content = context.global_memory.as_ref().map(|m| m.content.as_str()); + for project in &prompt_projects { + let Some(project_root_dir) = resolve_project_root_dir(workspace, project) else { + continue; + }; - for project in &prompt_projects { - let Some(project_root_dir) = resolve_project_root_dir(workspace, project) else { - continue; - }; + if let Some(root_prompt) = project.root_memory_prompt.as_ref() { + let combined_content = + combine_global_with_content(global_memory_content, &root_prompt.content); + output_files.push(BaseOutputFileDeclarationDto { + path: project_root_dir + .join(OPENCODE_PROJECT_CONFIG_DIR) + .join(OPENCODE_MEMORY_FILE) + .to_string_lossy() + .into_owned(), + scope: Some(PROJECT_SCOPE.to_string()), + content: combined_content, + encoding: None, + }); + } - if let Some(root_prompt) = project.root_memory_prompt.as_ref() { - let combined_content = - combine_global_with_content(global_memory_content, &root_prompt.content); - output_files.push(BaseOutputFileDeclarationDto { - path: project_root_dir - .join(OPENCODE_PROJECT_CONFIG_DIR) - .join(OPENCODE_MEMORY_FILE) - .to_string_lossy() - .into_owned(), - scope: Some(PROJECT_SCOPE.to_string()), - content: combined_content, - encoding: None, - }); + if let Some(child_prompts) = project.child_memory_prompts.as_ref() { + // Fixes #380: Opencode needs nested .opencode/AGENTS.md files for child prompts. + for child_prompt in child_prompts { + output_files.push(BaseOutputFileDeclarationDto { + path: resolve_relative_path(&child_prompt.dir) + .join(OPENCODE_PROJECT_CONFIG_DIR) + .join(OPENCODE_MEMORY_FILE) + .to_string_lossy() + .into_owned(), + scope: Some(PROJECT_SCOPE.to_string()), + content: child_prompt.content.clone(), + encoding: None, + }); + } + } } } @@ -336,7 +380,9 @@ fn append_skill_supporting_files( for child_doc in child_docs { output_files.push(BaseOutputFileDeclarationDto { path: skill_sub_dir - .join(resolve_child_doc_output_relative_path(&child_doc.relative_path)) + .join(resolve_child_doc_output_relative_path( + &child_doc.relative_path, + )) .to_string_lossy() .into_owned(), scope: Some(PROJECT_SCOPE.to_string()), @@ -356,9 +402,7 @@ fn append_skill_supporting_files( scope: Some(PROJECT_SCOPE.to_string()), content: resource.content.clone(), encoding: match resource.encoding { - crate::domain::plugin_shared::SkillResourceEncoding::Base64 => { - Some("base64".to_string()) - } + crate::domain::plugin_shared::SkillResourceEncoding::Base64 => Some("base64".to_string()), crate::domain::plugin_shared::SkillResourceEncoding::Text => None, }, }); @@ -367,7 +411,10 @@ fn append_skill_supporting_files( if let Some(mcp_config) = skill.mcp_config.as_ref() { output_files.push(BaseOutputFileDeclarationDto { - path: skill_sub_dir.join("mcp.json").to_string_lossy().into_owned(), + path: skill_sub_dir + .join("mcp.json") + .to_string_lossy() + .into_owned(), scope: Some(PROJECT_SCOPE.to_string()), content: mcp_config.raw_content.clone(), encoding: None, @@ -781,61 +828,68 @@ mod tests { description: Some("desc".to_string()), ..SkillYAMLFrontMatter::default() }), - child_docs: Some(vec![SkillChildDoc { - prompt_type: PromptKind::SkillChildDoc, - content: "guide".to_string(), - length: 5, - file_path_kind: crate::infra::path_types::FilePathKind::Relative, - relative_path: "guide.mdx".to_string(), - dir: crate::infra::path_types::RelativePath::new( - "guide.mdx", - "/workspace/aindex/skills/test", - ), - raw_front_matter: None, - markdown_ast: None, - markdown_contents: None, - }, SkillChildDoc { - prompt_type: PromptKind::SkillChildDoc, - content: "linux-wsl".to_string(), - length: 9, - file_path_kind: crate::infra::path_types::FilePathKind::Relative, - relative_path: "references/linux-wsl.mdx".to_string(), - dir: crate::infra::path_types::RelativePath::new( - "references/linux-wsl.mdx", - "/workspace/aindex/skills/test", - ), - raw_front_matter: None, - markdown_ast: None, - markdown_contents: None, - }]), - resources: Some(vec![SkillResource { - prompt_type: PromptKind::SkillResource, - extension: "txt".to_string(), - file_name: "notes.txt".to_string(), - relative_path: "assets/notes.txt".to_string(), - content: "notes".to_string(), - encoding: SkillResourceEncoding::Text, - length: 5, - mime_type: None, - }, SkillResource { - prompt_type: PromptKind::SkillResource, - extension: "sh".to_string(), - file_name: "capture-workflow.sh".to_string(), - relative_path: "templates/capture-workflow.sh".to_string(), - content: "#!/usr/bin/env bash\necho capture\n".to_string(), - encoding: SkillResourceEncoding::Text, - length: 32, - mime_type: None, - }, SkillResource { - prompt_type: PromptKind::SkillResource, - extension: "bin".to_string(), - file_name: "blob.bin".to_string(), - relative_path: "assets/blob.bin".to_string(), - content: "AAEC".to_string(), - encoding: SkillResourceEncoding::Base64, - length: 3, - mime_type: Some("application/octet-stream".to_string()), - }]), + child_docs: Some(vec![ + SkillChildDoc { + prompt_type: PromptKind::SkillChildDoc, + content: "guide".to_string(), + length: 5, + file_path_kind: crate::infra::path_types::FilePathKind::Relative, + relative_path: "guide.mdx".to_string(), + dir: crate::infra::path_types::RelativePath::new( + "guide.mdx", + "/workspace/aindex/skills/test", + ), + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, + }, + SkillChildDoc { + prompt_type: PromptKind::SkillChildDoc, + content: "linux-wsl".to_string(), + length: 9, + file_path_kind: crate::infra::path_types::FilePathKind::Relative, + relative_path: "references/linux-wsl.mdx".to_string(), + dir: crate::infra::path_types::RelativePath::new( + "references/linux-wsl.mdx", + "/workspace/aindex/skills/test", + ), + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, + }, + ]), + resources: Some(vec![ + SkillResource { + prompt_type: PromptKind::SkillResource, + extension: "txt".to_string(), + file_name: "notes.txt".to_string(), + relative_path: "assets/notes.txt".to_string(), + content: "notes".to_string(), + encoding: SkillResourceEncoding::Text, + length: 5, + mime_type: None, + }, + SkillResource { + prompt_type: PromptKind::SkillResource, + extension: "sh".to_string(), + file_name: "capture-workflow.sh".to_string(), + relative_path: "templates/capture-workflow.sh".to_string(), + content: "#!/usr/bin/env bash\necho capture\n".to_string(), + encoding: SkillResourceEncoding::Text, + length: 32, + mime_type: None, + }, + SkillResource { + prompt_type: PromptKind::SkillResource, + extension: "bin".to_string(), + file_name: "blob.bin".to_string(), + relative_path: "assets/blob.bin".to_string(), + content: "AAEC".to_string(), + encoding: SkillResourceEncoding::Base64, + length: 3, + mime_type: Some("application/octet-stream".to_string()), + }, + ]), mcp_config: Some(SkillMcpConfig { prompt_type: PromptKind::SkillMcpConfig, mcp_servers: std::collections::HashMap::new(), @@ -890,10 +944,26 @@ mod tests { ); assert!(skill_paths.iter().any(|path| path.ends_with("SKILL.md"))); assert!(skill_paths.iter().any(|path| path.ends_with("guide.md"))); - assert!(skill_paths.iter().any(|path| path.ends_with("references/linux-wsl.md"))); - assert!(skill_paths.iter().any(|path| path.ends_with("assets/notes.txt"))); - assert!(skill_paths.iter().any(|path| path.ends_with("templates/capture-workflow.sh"))); - assert!(skill_paths.iter().any(|path| path.ends_with("assets/blob.bin"))); + assert!( + skill_paths + .iter() + .any(|path| path.ends_with("references/linux-wsl.md")) + ); + assert!( + skill_paths + .iter() + .any(|path| path.ends_with("assets/notes.txt")) + ); + assert!( + skill_paths + .iter() + .any(|path| path.ends_with("templates/capture-workflow.sh")) + ); + assert!( + skill_paths + .iter() + .any(|path| path.ends_with("assets/blob.bin")) + ); assert!(skill_paths.iter().any(|path| path.ends_with("mcp.json"))); let binary_resource = plan @@ -945,9 +1015,15 @@ mod tests { }) .unwrap(); - assert!(skill_file.content.contains("name: dev-tools-reverse-engineering")); assert!( - skill_file.content.contains("skill: aindex/skills/dev-tools/reverse-engineering") + skill_file + .content + .contains("name: dev-tools-reverse-engineering") + ); + assert!( + skill_file + .content + .contains("skill: aindex/skills/dev-tools/reverse-engineering") ); } } diff --git a/sdk/src/domain/output_plans/trae_output_plan.rs b/sdk/src/domain/output_plans/trae_output_plan.rs index 7edfc554..a9db4e4f 100644 --- a/sdk/src/domain/output_plans/trae_output_plan.rs +++ b/sdk/src/domain/output_plans/trae_output_plan.rs @@ -78,6 +78,24 @@ fn build_output_files( content, encoding: None, }); + + if let Some(child_prompts) = project.child_memory_prompts.as_ref() { + // Fixes #380: Trae must emit nested steering files for child memory prompts. + for child_prompt in child_prompts { + let child_steering_dir = resolve_relative_path(&child_prompt.dir) + .join(".trae") + .join("steering"); + output_files.push(BaseOutputFileDeclarationDto { + path: child_steering_dir + .join(TRAE_STEERING_FILE) + .to_string_lossy() + .into_owned(), + scope: Some(PROJECT_SCOPE.to_string()), + content: child_prompt.content.clone(), + encoding: None, + }); + } + } } } diff --git a/sdk/src/domain/output_plans/windsurf_output_plan.rs b/sdk/src/domain/output_plans/windsurf_output_plan.rs index 7eed372f..acd06219 100644 --- a/sdk/src/domain/output_plans/windsurf_output_plan.rs +++ b/sdk/src/domain/output_plans/windsurf_output_plan.rs @@ -77,6 +77,20 @@ fn build_output_files( encoding: None, }); } + if let Some(child_prompts) = project.child_memory_prompts.as_ref() { + // Fixes #380: Windsurf needs nested .windsurfrules files for child memory prompts. + for child_prompt in child_prompts { + output_files.push(BaseOutputFileDeclarationDto { + path: resolve_relative_path(&child_prompt.dir) + .join(WINDSURF_MEMORY_FILE) + .to_string_lossy() + .into_owned(), + scope: Some(PROJECT_SCOPE.to_string()), + content: child_prompt.content.clone(), + encoding: None, + }); + } + } } } output_files diff --git a/sdk/src/infra/deno_runtime.rs b/sdk/src/infra/deno_runtime.rs index c6fb163f..dcaf13a9 100644 --- a/sdk/src/infra/deno_runtime.rs +++ b/sdk/src/infra/deno_runtime.rs @@ -6,7 +6,7 @@ use std::borrow::Cow; use std::cell::RefCell; use std::collections::{BTreeMap, HashMap}; -use std::path::Path; +use std::path::{Path, PathBuf}; use std::rc::Rc; use deno_ast::MediaType; @@ -48,14 +48,24 @@ impl DenoRuntime { return Err(format!("Script not found: {}", script_path.display())); } + let parsed_context: serde_json::Value = serde_json::from_str(context_json) + .map_err(|error| format!("Invalid runtime context JSON: {error}"))?; + let resolved_script_path = ensure_allowed_script_path(script_path, &parsed_context)?; + tokio::runtime::Builder::new_current_thread() .enable_all() .build() .map_err(|error| format!("Failed to create embedded Deno runtime: {error}"))? - .block_on(Self::execute_ts_async(script_path, context_json)) + .block_on(Self::execute_ts_async( + &resolved_script_path, + parsed_context, + )) } - async fn execute_ts_async(script_path: &Path, context_json: &str) -> Result { + async fn execute_ts_async( + script_path: &Path, + parsed_context: serde_json::Value, + ) -> Result { let source_map_store = Rc::new(RefCell::new(HashMap::new())); let module_loader = Rc::new(TypescriptModuleLoader { source_maps: source_map_store, @@ -71,8 +81,6 @@ impl DenoRuntime { let main_module = resolve_path(&script_path.to_string_lossy(), ¤t_dir) .map_err(|error| format!("Unable to resolve script module: {error}"))?; - let parsed_context: serde_json::Value = serde_json::from_str(context_json) - .map_err(|error| format!("Invalid runtime context JSON: {error}"))?; let env_map = allowed_environment(&parsed_context); let bootstrap = format!( r#" @@ -174,6 +182,9 @@ globalThis.Deno = {{ return Err("Proxy context must be a JSON object".to_string()); } }; + // Fixes #360: proxy execution should stay anchored to the proxy's own + // directory instead of allowing arbitrary script roots from the callsite. + append_allowed_script_root(&mut context, proxy_path.parent()); context.insert( "logicalPath".to_string(), serde_json::Value::String(logical_path.to_string()), @@ -233,6 +244,95 @@ globalThis.Deno = {{ } } +fn append_allowed_script_root( + context: &mut serde_json::Map, + root: Option<&Path>, +) { + let Some(root) = root else { + return; + }; + let root = root.to_string_lossy().into_owned(); + let roots = context + .entry("allowedScriptRoots".to_string()) + .or_insert_with(|| serde_json::Value::Array(Vec::new())); + + if let serde_json::Value::Array(values) = roots + && !values + .iter() + .any(|value| value.as_str() == Some(root.as_str())) + { + values.push(serde_json::Value::String(root)); + } +} + +fn resolve_existing_path(path: &Path, label: &str) -> Result { + let absolute = if path.is_absolute() { + path.to_path_buf() + } else { + std::env::current_dir() + .map_err(|error| format!("Unable to resolve current directory: {error}"))? + .join(path) + }; + + absolute + .canonicalize() + .map_err(|error| format!("Unable to resolve {label}: {error}")) +} + +fn allowed_script_roots(context: &serde_json::Value) -> Result, String> { + let mut roots = Vec::new(); + + if let Some(values) = context + .get("allowedScriptRoots") + .and_then(serde_json::Value::as_array) + { + for value in values { + let Some(path) = value.as_str() else { + continue; + }; + roots.push(resolve_existing_path( + Path::new(path), + "allowed script root", + )?); + } + } + + for key in ["aindexDir", "workspaceDir"] { + if let Some(path) = context.get(key).and_then(serde_json::Value::as_str) { + roots.push(resolve_existing_path(Path::new(path), key)?); + } + } + + if roots.is_empty() { + return Err( + "Script execution requires at least one allowed script root in context".to_string(), + ); + } + + roots.sort(); + roots.dedup(); + Ok(roots) +} + +fn ensure_allowed_script_path( + script_path: &Path, + context: &serde_json::Value, +) -> Result { + let resolved_script = resolve_existing_path(script_path, "script path")?; + let roots = allowed_script_roots(context)?; + + // Fixes #360: execute_ts must fail closed unless the caller proves the + // script lives under an explicit allowlisted root. + if roots.iter().any(|root| resolved_script.starts_with(root)) { + return Ok(resolved_script); + } + + Err(format!( + "Script path is outside allowed script roots: {}", + resolved_script.display() + )) +} + impl Default for DenoRuntime { fn default() -> Self { Self @@ -419,10 +519,13 @@ mod tests { let tmp = TempDir::new().unwrap(); let script_path = tmp.path().join("echo.ts"); std::fs::write(&script_path, "console.log('embedded-deno-ok');").unwrap(); + let context = serde_json::json!({ + "allowedScriptRoots": [tmp.path().to_string_lossy().to_string()] + }); let result = with_path_removed(|| { let runtime = DenoRuntime::new().unwrap(); - runtime.execute_ts(&script_path, "{}") + runtime.execute_ts(&script_path, &context.to_string()) }); assert!(result.is_ok(), "expected embedded runtime, got: {result:?}"); @@ -495,7 +598,12 @@ console.log(JSON.stringify({ ) .unwrap(); - let result = runtime.execute_ts(&script_path, "{}").unwrap(); + let context = serde_json::json!({ + "allowedScriptRoots": [tmp.path().to_string_lossy().to_string()] + }); + let result = runtime + .execute_ts(&script_path, &context.to_string()) + .unwrap(); let parsed: serde_json::Value = serde_json::from_str(result.trim()).unwrap(); assert_eq!(parsed["hasSecret"], false); @@ -522,6 +630,7 @@ console.log(JSON.stringify({ .unwrap(); let context = serde_json::json!({ + "allowedScriptRoots": [tmp.path().to_string_lossy().to_string()], "allowedEnv": ["TNMSD_ALLOWED_ENV_FOR_TEST", "TNMSD_MISSING_ENV_FOR_TEST"] }); let result = runtime @@ -536,4 +645,29 @@ console.log(JSON.stringify({ ); }); } + + #[test] + fn test_execute_ts_rejects_scripts_outside_allowed_roots() { + let runtime = DenoRuntime::new().unwrap(); + let tmp = TempDir::new().unwrap(); + let allowed_root = tmp.path().join("allowed"); + let blocked_root = tmp.path().join("blocked"); + std::fs::create_dir_all(&allowed_root).unwrap(); + std::fs::create_dir_all(&blocked_root).unwrap(); + let blocked_script = blocked_root.join("echo.ts"); + std::fs::write(&blocked_script, "console.log('blocked');").unwrap(); + + let context = serde_json::json!({ + "allowedScriptRoots": [allowed_root.to_string_lossy().to_string()] + }); + let result = runtime.execute_ts(&blocked_script, &context.to_string()); + + assert!( + result + .as_ref() + .err() + .is_some_and(|error| error.contains("outside allowed script roots")), + "unexpected result: {result:?}" + ); + } } diff --git a/sdk/src/repositories/skill.rs b/sdk/src/repositories/skill.rs index 81d51c2d..1a8a833f 100644 --- a/sdk/src/repositories/skill.rs +++ b/sdk/src/repositories/skill.rs @@ -177,8 +177,10 @@ fn strip_leading_front_matter(content: &str) -> &str { fn strip_leading_export_statements(content: &str) -> String { let export_default_regex = regex_lite::Regex::new(r"(?s)^\s*export\s+default\s*\{[\s\S]*?\}\s*;?\s*").ok(); - let named_export_regex = - regex_lite::Regex::new(r#"(?m)^\s*export\s+(?:const|let)\s+description\s*=\s*['"`][^'"`]+['"`]\s*;?\s*$\n?"#).ok(); + let named_export_regex = regex_lite::Regex::new( + r#"(?m)^\s*export\s+(?:const|let)\s+description\s*=\s*['"`][^'"`]+['"`]\s*;?\s*$\n?"#, + ) + .ok(); let without_default = if let Some(re) = export_default_regex { re.replace(content, "").into_owned() @@ -495,8 +497,7 @@ fn collect_expected_child_doc_paths( if !file_name.ends_with(".src.mdx") { continue; } - if current_dir == skill_src_dir - && (file_name == "skill.src.mdx" || file_name == "desc.src.mdx") + if current_dir == skill_src_dir && (file_name == "skill.src.mdx" || file_name == "desc.src.mdx") { continue; } @@ -746,7 +747,11 @@ fn collect_skill_directories(skills_dir: &Path) -> Result }; for entry in entries.flatten() { - if !entry.file_type().map(|file_type| file_type.is_dir()).unwrap_or(false) { + if !entry + .file_type() + .map(|file_type| file_type.is_dir()) + .unwrap_or(false) + { continue; } @@ -1368,7 +1373,11 @@ mod tests { "#!/usr/bin/env bash\necho auth\n", ) .unwrap(); - fs::write(skill_dir.join("assets").join("logo.png"), [0x89_u8, 0x50, 0x4E, 0x47]).unwrap(); + fs::write( + skill_dir.join("assets").join("logo.png"), + [0x89_u8, 0x50, 0x4E, 0x47], + ) + .unwrap(); fs::write( skill_dir.join("mcp.json"), r#"{"mcpServers":{"browser":{"command":"agent-browser"}}}"#, @@ -1422,7 +1431,10 @@ mod tests { .find(|resource| resource["relativePath"] == "assets/logo.png") .unwrap(); assert_eq!(logo["encoding"], "base64"); - assert_eq!(skill["mcpConfig"]["mcpServers"]["browser"]["command"], "agent-browser"); + assert_eq!( + skill["mcpConfig"]["mcpServers"]["browser"]["command"], + "agent-browser" + ); } #[test] diff --git a/sdk/src/services/prompt_service.rs b/sdk/src/services/prompt_service.rs index 12d69762..4513b6dc 100644 --- a/sdk/src/services/prompt_service.rs +++ b/sdk/src/services/prompt_service.rs @@ -333,6 +333,13 @@ fn strip_prompt_extension(file_path: &str) -> String { } } +fn safe_listed_relative_path(cwd: &Path, entry_path: &Path) -> Option { + // Fixes #361: if strip_prefix fails, skip the entry instead of leaking an + // absolute path through the prompt catalog. + let relative_path = entry_path.strip_prefix(cwd).ok()?; + Some(normalize_slash_path(&relative_path.to_string_lossy())) +} + fn list_files(cwd: &Path, suffixes: &[&str]) -> Vec { if !cwd.is_dir() { return vec![]; @@ -344,9 +351,9 @@ fn list_files(cwd: &Path, suffixes: &[&str]) -> Vec { } if let Some(name) = entry.file_name().to_str() && suffixes.iter().any(|s| name.ends_with(s)) + && let Some(rel) = safe_listed_relative_path(cwd, entry.path()) { - let rel = entry.path().strip_prefix(cwd).unwrap_or(entry.path()); - results.push(normalize_slash_path(&rel.to_string_lossy())); + results.push(rel); } } results @@ -726,7 +733,11 @@ fn collect_skill_prompt_ids(env: &ResolvedPromptEnvironment) -> Vec { let mut skill_names = BTreeSet::new(); for entry in fs::read_dir(&root).into_iter().flatten().flatten() { - if !entry.file_type().map(|file_type| file_type.is_dir()).unwrap_or(false) { + if !entry + .file_type() + .map(|file_type| file_type.is_dir()) + .unwrap_or(false) + { continue; } @@ -740,7 +751,11 @@ fn collect_skill_prompt_ids(env: &ResolvedPromptEnvironment) -> Vec { continue; } - for nested_entry in fs::read_dir(&first_level_dir).into_iter().flatten().flatten() { + for nested_entry in fs::read_dir(&first_level_dir) + .into_iter() + .flatten() + .flatten() + { let nested_path = nested_entry.path(); if !nested_entry .file_type() @@ -1248,8 +1263,18 @@ mod tests { let skill = build_prompt_definition_from_id("skill:tools/demo", &env).unwrap(); assert_eq!(skill.prompt_id, "skill:tools/demo"); - assert!(skill.paths.zh.ends_with("aindex/skills/tools/demo/skill.src.mdx")); - assert!(skill.paths.en.ends_with("aindex/skills/tools/demo/skill.mdx")); + assert!( + skill + .paths + .zh + .ends_with("aindex/skills/tools/demo/skill.src.mdx") + ); + assert!( + skill + .paths + .en + .ends_with("aindex/skills/tools/demo/skill.mdx") + ); let child = build_prompt_definition_from_id("skill-child-doc:tools/demo/guides/setup", &env).unwrap(); @@ -1279,7 +1304,11 @@ mod tests { fs::write(legacy_dir.join("skill.mdx"), "Legacy").unwrap(); fs::write(legacy_dir.join("guide.mdx"), "Legacy guide").unwrap(); - fs::write(env.aindex_dir.join("skills").join("tools").join("desc.mdx"), "Tools").unwrap(); + fs::write( + env.aindex_dir.join("skills").join("tools").join("desc.mdx"), + "Tools", + ) + .unwrap(); fs::write(categorized_dir.join("skill.mdx"), "Categorized").unwrap(); fs::write(categorized_dir.join("guides").join("setup.mdx"), "Setup").unwrap(); @@ -1297,4 +1326,16 @@ mod tests { prompt_ids ); } + + #[test] + fn safe_listed_relative_path_drops_non_descendant_entries() { + let tmp = TempDir::new().unwrap(); + let leaked_path = tmp.path().join("outside").join("prompt.mdx"); + let listed = safe_listed_relative_path(tmp.path().join("workspace").as_path(), &leaked_path); + + assert!( + listed.is_none(), + "strip_prefix fallback must not leak absolute paths" + ); + } } From 87b723e9b10a58173eedd740f633d7388d861884 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Sun, 3 May 2026 01:53:07 +0800 Subject: [PATCH 42/45] feat: update cleanup logic to handle child memory prompts in build_cleanup function --- .../output_plans/claude_code_output_plan.rs | 27 +++++++++++++++++-- 1 file changed, 25 insertions(+), 2 deletions(-) diff --git a/sdk/src/domain/output_plans/claude_code_output_plan.rs b/sdk/src/domain/output_plans/claude_code_output_plan.rs index dace7c70..23ba1a55 100644 --- a/sdk/src/domain/output_plans/claude_code_output_plan.rs +++ b/sdk/src/domain/output_plans/claude_code_output_plan.rs @@ -35,7 +35,7 @@ pub fn build_claude_code_output_plan( Ok(BaseOutputPluginPlanDto { plugin_name: CLAUDE_CODE_PLUGIN_NAME.to_string(), output_files: build_output_files(workspace, context), - cleanup: build_cleanup(workspace), + cleanup: build_cleanup(workspace, context), }) } @@ -758,8 +758,9 @@ mod tests { } } -fn build_cleanup(workspace: &Workspace) -> CleanupDeclarationsDto { +fn build_cleanup(workspace: &Workspace, _context: &OutputContext) -> CleanupDeclarationsDto { let mut delete = Vec::new(); + let prompt_projects = get_project_prompt_output_projects(workspace); for project in get_project_output_projects(workspace) { let Some(project_root_dir) = resolve_project_root_dir(workspace, project) else { @@ -778,6 +779,28 @@ fn build_cleanup(workspace: &Workspace) -> CleanupDeclarationsDto { label: Some("delete.project".to_string()), }); + // #385: clean must keep deleting generated child CLAUDE.md files even when + // claudeCode is later disabled, so cleanup needs explicit child targets too. + if let Some(prompt_project) = prompt_projects.iter().copied().find(|candidate| { + resolve_project_root_dir(workspace, candidate) + .as_ref() + .is_some_and(|candidate_root_dir| candidate_root_dir == &project_root_dir) + }) && let Some(child_prompts) = prompt_project.child_memory_prompts.as_ref() { + for child_prompt in child_prompts { + delete.push(CleanupTargetDto { + path: resolve_relative_path(&child_prompt.dir) + .join(CLAUDE_CODE_MEMORY_FILE) + .to_string_lossy() + .into_owned(), + kind: CleanupTargetKindDto::File, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: Some(PROJECT_SCOPE.to_string()), + label: Some("delete.project.child".to_string()), + }); + } + } + let settings_dir = project_root_dir.join(".claude"); delete.push(CleanupTargetDto { path: settings_dir From f380081f4bf45d19c8a4d4889bb28f324c4a6ac5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Sun, 3 May 2026 01:58:06 +0800 Subject: [PATCH 43/45] Refactor local tests for rules, opencode, and trae with isolated fixtures - Introduced `IsolatedRulesFixture`, `IsolatedOpencodeFixture`, and `IsolatedTraeFixture` to encapsulate test setup and environment management for rules, opencode, and trae tests. - Updated tests to use the new fixture structures, improving isolation and reducing dependencies on the host workspace. - Enhanced the `write_rules_config`, `write_opencode_config`, and `write_trae_config` functions to streamline the creation of necessary configuration files for each test. - Improved assertions in the tests to ensure clarity and correctness, including checks for file existence and content validation. - Removed redundant code and comments, ensuring a cleaner and more maintainable test suite. --- cli/local-tests/tests/agents_md_smoke.rs | 374 +++++---- cli/local-tests/tests/claude_smoke.rs | 364 ++++++--- cli/local-tests/tests/clean_blackbox.rs | 385 +++++---- cli/local-tests/tests/codex_smoke.rs | 734 +++++++++++------- cli/local-tests/tests/install_smoke.rs | 509 +++++++----- cli/local-tests/tests/logging_clean.rs | 136 +++- .../tests/logging_install_observability.rs | 144 +++- cli/local-tests/tests/logging_levels.rs | 149 +++- .../tests/opencode_agent_mode_validation.rs | 53 +- cli/local-tests/tests/opencode_smoke.rs | 705 +++++++---------- cli/local-tests/tests/rules_source_smoke.rs | 255 ++++-- cli/local-tests/tests/support/opencode.rs | 366 +++++++++ cli/local-tests/tests/trae_smoke.rs | 315 +++++--- 13 files changed, 2801 insertions(+), 1688 deletions(-) create mode 100644 cli/local-tests/tests/support/opencode.rs diff --git a/cli/local-tests/tests/agents_md_smoke.rs b/cli/local-tests/tests/agents_md_smoke.rs index cd8a0028..e42e4aee 100644 --- a/cli/local-tests/tests/agents_md_smoke.rs +++ b/cli/local-tests/tests/agents_md_smoke.rs @@ -1,127 +1,168 @@ -//! 本地裸机 AGENTS.md 测试:验证 AgentsOutputAdaptor 生成的 AGENTS.md 文件。 +//! Isolated AGENTS.md smoke tests for AgentsOutputAdaptor. //! -//! **前提**:项目已配置,aindex 目录已存在且有内容。 +//! These tests intentionally avoid the caller's real `~/.aindex/.tnmsc.json` +//! because the shared local-test runner otherwise follows the host +//! `workspaceDir` and mutates unrelated workspaces. use std::fs; -use std::path::PathBuf; +use std::path::{Path, PathBuf}; use tnmsc_local_tests::LocalTestRunner; -/// 临时修改全局配置以禁用 agents_md 插件,测试结束后自动恢复。 -struct GlobalConfigGuard { - config_path: PathBuf, - original_content: Option, +struct IsolatedAgentsFixture { + runner: LocalTestRunner, + temp_home: PathBuf, + project_dir: PathBuf, + aindex_project_dir: PathBuf, } -impl GlobalConfigGuard { - fn with_agents_md_disabled() -> Self { - let config_path = tnmsc_local_tests::home_dir() - .join(".aindex") - .join(".tnmsc.json"); - - let original_content = if config_path.is_file() { - fs::read_to_string(&config_path).ok() - } else { - None - }; - - let mut config_json: serde_json::Value = original_content - .as_ref() - .and_then(|c| serde_json::from_str(c).ok()) - .unwrap_or_else(|| serde_json::json!({})); - - if let Some(obj) = config_json.as_object_mut() { - let plugins = obj - .entry("plugins") - .or_insert_with(|| serde_json::json!({})); - if let Some(p) = plugins.as_object_mut() { - p.insert("agentsMd".into(), serde_json::json!(false)); - p.insert("claudeCode".into(), serde_json::json!(true)); - p.insert("opencode".into(), serde_json::json!(true)); - p.insert("git".into(), serde_json::json!(true)); - } - } - - let new_content = format!("{}\n", serde_json::to_string_pretty(&config_json).unwrap()); - fs::write(&config_path, new_content).expect("should write temp global config"); +impl IsolatedAgentsFixture { + fn new(agents_enabled: bool) -> Self { + let temp_root = std::env::temp_dir().join(format!( + "tnmsc-local-agents-{}-{}", + std::process::id(), + std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap_or_default() + .as_nanos() + )); + let temp_home = temp_root.join("home"); + let workspace_dir = temp_root.join("workspace"); + let project_dir = workspace_dir.join("memory-sync"); + let aindex_project_dir = workspace_dir.join("aindex").join("app").join("memory-sync"); + + fs::create_dir_all(temp_home.join(".aindex")).unwrap(); + fs::create_dir_all(project_dir.join(".github")).unwrap(); + fs::create_dir_all(aindex_project_dir.join(".github")).unwrap(); + + // issue local-tests-agents-isolation: agents smoke tests must validate + // generated AGENTS.md files in a self-owned fixture instead of the host workspace. + write_config(&temp_home, &workspace_dir, agents_enabled); + fs::write( + aindex_project_dir.join("agt.mdx"), + "# Issue sync root\n\nProject root instructions\n", + ) + .unwrap(); + fs::write( + aindex_project_dir.join(".github").join("agt.mdx"), + "# Issue sync child\n\nChild instructions\n", + ) + .unwrap(); Self { - config_path, - original_content, + runner: LocalTestRunner::with_cwd(&project_dir), + temp_home, + project_dir, + aindex_project_dir, } } + + fn run(&self, args: &[&str]) -> tnmsc_local_tests::CommandResult { + let temp_home = self.temp_home.to_string_lossy().into_owned(); + self + .runner + .run_at_with_env(&self.project_dir, args, &[("HOME", &temp_home)]) + } + + fn install(&self) -> tnmsc_local_tests::CommandResult { + self.run(&["install"]) + } + + fn clean(&self) -> tnmsc_local_tests::CommandResult { + self.run(&["clean"]) + } + + fn project_agents_path(&self) -> PathBuf { + self.project_dir.join("AGENTS.md") + } + + fn child_agents_path(&self) -> PathBuf { + self.project_dir.join(".github").join("AGENTS.md") + } + + fn overwrite_agents_enabled(&self, enabled: bool) { + let workspace_dir = self.project_dir.parent().unwrap_or(&self.project_dir); + write_config(&self.temp_home, workspace_dir, enabled); + } } -impl Drop for GlobalConfigGuard { - fn drop(&mut self) { - match &self.original_content { - Some(content) => { - let _ = fs::write(&self.config_path, content); +fn write_config(temp_home: &Path, workspace_dir: &Path, agents_enabled: bool) { + fs::write( + temp_home.join(".aindex").join(".tnmsc.json"), + serde_json::json!({ + "workspaceDir": workspace_dir.to_string_lossy(), + "plugins": { + "agentsMd": agents_enabled, + "git": false, + "readme": false, + "vscode": false, + "zed": false, + "jetbrains": false, + "jetbrainsCodeStyle": false, + "claudeCode": false, + "codex": false, + "cursor": false, + "droid": false, + "gemini": false, + "kiro": false, + "opencode": false, + "qoder": false, + "trae": false, + "traeCn": false, + "warp": false, + "windsurf": false } - None => { - let _ = fs::remove_file(&self.config_path); - } - } - } + }) + .to_string(), + ) + .unwrap(); } -/// Verify that install generates both the project-root AGENTS.md and a child -/// .github/AGENTS.md with non-empty content. #[test] fn local_agents_md_install_generates_project_agents_md() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); + let fixture = IsolatedAgentsFixture::new(true); - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); + let clean = fixture.clean(); + clean.assert_success("isolated tnmsc clean before install"); - let install = runner.install(); - install.assert_success("tnmsc install"); + let install = fixture.install(); + install.assert_success("isolated tnmsc install"); assert!( - runner.agents_md_project_file_exists(), - "~/workspace/memory-sync/AGENTS.md should be generated after install" + fixture.project_agents_path().is_file(), + "project AGENTS.md should be generated after install" ); - - let content = runner - .read_agents_md_project_file() - .expect("AGENTS.md should be readable"); - assert!(!content.is_empty(), "AGENTS.md should not be empty"); - assert!( - runner.agents_md_child_file_exists(".github"), - "~/workspace/memory-sync/.github/AGENTS.md should be generated after install" + fixture.child_agents_path().is_file(), + "child .github/AGENTS.md should be generated after install" + ); + assert!( + !fs::read_to_string(fixture.project_agents_path()) + .unwrap() + .trim() + .is_empty(), + "project AGENTS.md should not be empty" ); - - let child_content = runner - .read_agents_md_child_file(".github") - .expect(".github/AGENTS.md should be readable"); assert!( - !child_content.is_empty(), - ".github/AGENTS.md should not be empty" + !fs::read_to_string(fixture.child_agents_path()) + .unwrap() + .trim() + .is_empty(), + "child .github/AGENTS.md should not be empty" ); } -/// Verify that the generated AGENTS.md content exactly matches the aindex -/// source `app/memory-sync/agt.mdx`. #[test] fn local_agents_md_content_matches_aindex_source() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); - - let install = runner.install(); - install.assert_success("tnmsc install"); + let fixture = IsolatedAgentsFixture::new(true); - let aindex_content = runner - .read_aindex_file("app/memory-sync/agt.mdx") - .expect("aindex source agt.mdx should be readable"); + fixture + .clean() + .assert_success("isolated tnmsc clean before install"); + fixture.install().assert_success("isolated tnmsc install"); - let generated_content = runner - .read_agents_md_project_file() - .expect("AGENTS.md should be readable after install"); + let aindex_content = fs::read_to_string(fixture.aindex_project_dir.join("agt.mdx")).unwrap(); + let generated_content = fs::read_to_string(fixture.project_agents_path()).unwrap(); assert_eq!( aindex_content.trim(), @@ -130,26 +171,18 @@ fn local_agents_md_content_matches_aindex_source() { ); } -/// Verify that the generated .github/AGENTS.md content exactly matches the aindex -/// source `app/memory-sync/.github/agt.mdx`. #[test] fn local_agents_md_child_content_matches_aindex_source() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); + let fixture = IsolatedAgentsFixture::new(true); - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before install"); + fixture.install().assert_success("isolated tnmsc install"); - let install = runner.install(); - install.assert_success("tnmsc install"); - - let aindex_child_content = runner - .read_aindex_file("app/memory-sync/.github/agt.mdx") - .expect("aindex source .github/agt.mdx should be readable"); - - let generated_child_content = runner - .read_agents_md_child_file(".github") - .expect(".github/AGENTS.md should be readable after install"); + let aindex_child_content = + fs::read_to_string(fixture.aindex_project_dir.join(".github").join("agt.mdx")).unwrap(); + let generated_child_content = fs::read_to_string(fixture.child_agents_path()).unwrap(); assert_eq!( aindex_child_content.trim(), @@ -158,126 +191,81 @@ fn local_agents_md_child_content_matches_aindex_source() { ); } -/// Verify that `tnmsc clean` removes both the project-root AGENTS.md and the child -/// .github/AGENTS.md. #[test] fn local_agents_md_clean_removes_files() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - let install = runner.install(); - install.assert_success("tnmsc install before clean"); + let fixture = IsolatedAgentsFixture::new(true); - assert!( - runner.agents_md_project_file_exists(), - "AGENTS.md should exist after install" - ); - assert!( - runner.agents_md_child_file_exists(".github"), - ".github/AGENTS.md should exist after install" - ); + fixture + .install() + .assert_success("isolated tnmsc install before clean"); + assert!(fixture.project_agents_path().is_file()); + assert!(fixture.child_agents_path().is_file()); - let clean = runner.clean(); - clean.assert_success("tnmsc clean"); + fixture.clean().assert_success("isolated tnmsc clean"); assert!( - !runner.agents_md_project_file_exists(), - "AGENTS.md should be removed after clean" + !fixture.project_agents_path().exists(), + "project AGENTS.md should be removed after clean" ); assert!( - !runner.agents_md_child_file_exists(".github"), - ".github/AGENTS.md should be removed after clean" + !fixture.child_agents_path().exists(), + "child .github/AGENTS.md should be removed after clean" ); } -/// Verify that when `plugins.agentsMd` is set to `false`, install does NOT generate -/// AGENTS.md files. #[test] fn local_agents_md_disabled_by_config() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - // 手动清除可能由前面测试遗留的 AGENTS.md 文件。 - // 当 agents_md 被禁用时,clean 服务不会生成对应的 cleanup target, - // 因此无法依赖 tnmsc clean 来清理这些文件。 - fn remove_all_agents_md(dir: &std::path::Path) { - let Ok(entries) = std::fs::read_dir(dir) else { - return; - }; - for entry in entries.flatten() { - let path = entry.path(); - let Ok(ft) = entry.file_type() else { continue }; - if ft.is_dir() { - if let Some(name) = path.file_name() { - let name = name.to_string_lossy(); - if name == ".git" || name == "node_modules" || name == "target" { - continue; - } - } - if path.join("AGENTS.md").is_file() { - let _ = std::fs::remove_file(path.join("AGENTS.md")); - } - remove_all_agents_md(&path); - } - } - } - if runner.cwd().join("AGENTS.md").is_file() { - let _ = std::fs::remove_file(runner.cwd().join("AGENTS.md")); - } - remove_all_agents_md(runner.cwd()); - - let _guard = GlobalConfigGuard::with_agents_md_disabled(); + let fixture = IsolatedAgentsFixture::new(false); - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before disabled install"); - let install = runner.install(); - install.assert_success("tnmsc install"); + let install = fixture.install(); + install.assert_success("isolated tnmsc install with agentsMd disabled"); assert!( - !runner.agents_md_project_file_exists(), - "AGENTS.md should NOT be generated when agents_md is disabled" + !fixture.project_agents_path().exists(), + "project AGENTS.md should not be generated when agentsMd is disabled" ); assert!( - !runner.agents_md_child_file_exists(".github"), - ".github/AGENTS.md should NOT be generated when agents_md is disabled" + !fixture.child_agents_path().exists(), + "child .github/AGENTS.md should not be generated when agentsMd is disabled" ); } -/// Regression guard: `tnmsc clean` must remove AGENTS.md files even when the agentsMd -/// plugin is currently disabled. Design rationale: if a user disabled the plugin after -/// previous installs, stale files must still be cleaned. Clean behavior is independent -/// of plugin switches; only install respects the plugin toggle. #[test] fn local_agents_md_clean_always_removes_files_even_when_disabled() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - // Step 1: 在默认配置下(agents_md 启用)install,生成 AGENTS.md - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); + let fixture = IsolatedAgentsFixture::new(true); - let install = runner.install(); - install.assert_success("tnmsc install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before install"); + fixture + .install() + .assert_success("isolated tnmsc install with agentsMd enabled"); assert!( - runner.agents_md_project_file_exists(), - "AGENTS.md should exist after install with agents_md enabled" + fixture.project_agents_path().is_file(), + "project AGENTS.md should exist after install with agentsMd enabled" + ); + assert!( + fixture.child_agents_path().is_file(), + "child .github/AGENTS.md should exist after install with agentsMd enabled" ); - // Step 2: 临时禁用 agents_md,然后执行 clean - let _guard = GlobalConfigGuard::with_agents_md_disabled(); + fixture.overwrite_agents_enabled(false); - let clean_disabled = runner.clean(); - clean_disabled.assert_success("tnmsc clean with agents_md disabled"); + fixture + .clean() + .assert_success("isolated tnmsc clean with agentsMd disabled"); - // Step 3: 断言 AGENTS.md 已被清理,即使 agents_md 当前被禁用 assert!( - !runner.agents_md_project_file_exists(), - "AGENTS.md should be removed by clean even when agents_md is disabled" + !fixture.project_agents_path().exists(), + "project AGENTS.md should be removed by clean even when agentsMd is disabled" ); assert!( - !runner.agents_md_child_file_exists(".github"), - ".github/AGENTS.md should be removed by clean even when agents_md is disabled" + !fixture.child_agents_path().exists(), + "child .github/AGENTS.md should be removed by clean even when agentsMd is disabled" ); } diff --git a/cli/local-tests/tests/claude_smoke.rs b/cli/local-tests/tests/claude_smoke.rs index 8d1196c3..f6ca648c 100644 --- a/cli/local-tests/tests/claude_smoke.rs +++ b/cli/local-tests/tests/claude_smoke.rs @@ -1,71 +1,189 @@ -//! 本地裸机 CLAUDE.md 测试:验证 ClaudeCodeCLIOutputAdaptor 生成的 CLAUDE.md 文件。 +//! Isolated CLAUDE.md smoke tests for ClaudeCodeCLIOutputAdaptor. //! -//! **核心设计断言**:项目级 CLAUDE.md 的内容应如同 AGENTS.md 一样,直接输出 -//! 项目内存(root_memory_prompt / child_memory_prompts),而非全局内存或 -//! 全局+项目混合内容。Claude Code 是专属 IDE 插件,其项目级记忆文件 -//! 必须承载完整的项目上下文。 -//! -//! **前提**:项目已配置,aindex 目录已存在且有内容。 +//! These tests use a temporary HOME and workspace so they do not rely on or +//! mutate the caller's real `~/.aindex/.tnmsc.json`. + +use std::fs; +use std::path::{Path, PathBuf}; use tnmsc_local_tests::LocalTestRunner; -/// Verify that install generates both the project-root CLAUDE.md and a child -/// .github/CLAUDE.md, both with non-empty content. +struct IsolatedClaudeFixture { + runner: LocalTestRunner, + temp_home: PathBuf, + project_dir: PathBuf, + aindex_project_dir: PathBuf, +} + +impl IsolatedClaudeFixture { + fn new() -> Self { + let temp_root = std::env::temp_dir().join(format!( + "tnmsc-local-claude-{}-{}", + std::process::id(), + std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap_or_default() + .as_nanos() + )); + let temp_home = temp_root.join("home"); + let workspace_dir = temp_root.join("workspace"); + let project_dir = workspace_dir.join("memory-sync"); + let aindex_project_dir = workspace_dir.join("aindex").join("app").join("memory-sync"); + + fs::create_dir_all(temp_home.join(".aindex")).unwrap(); + fs::create_dir_all(project_dir.join(".github")).unwrap(); + fs::create_dir_all(aindex_project_dir.join(".github")).unwrap(); + + // issue local-tests-claude-isolation: claude smoke tests must validate + // generated CLAUDE.md files in an isolated HOME/workspace fixture. + write_claude_config(&temp_home, &workspace_dir, true); + fs::write( + workspace_dir.join("aindex").join("global.mdx"), + "# Global memory\n\nGlobal instructions\n", + ) + .unwrap(); + fs::write( + workspace_dir.join("aindex").join("workspace.mdx"), + "# Workspace memory\n\nWorkspace instructions\n", + ) + .unwrap(); + fs::write( + workspace_dir.join("aindex").join("workspace.src.mdx"), + "# Workspace memory\n\nWorkspace instructions\n", + ) + .unwrap(); + fs::write( + aindex_project_dir.join("agt.mdx"), + "# Claude project root\n\nProject root instructions\n", + ) + .unwrap(); + fs::write( + aindex_project_dir.join(".github").join("agt.mdx"), + "# Claude child\n\nChild instructions\n", + ) + .unwrap(); + + Self { + runner: LocalTestRunner::with_cwd(&project_dir), + temp_home, + project_dir, + aindex_project_dir, + } + } + + fn run(&self, args: &[&str]) -> tnmsc_local_tests::CommandResult { + let temp_home = self.temp_home.to_string_lossy().into_owned(); + self + .runner + .run_at_with_env(&self.project_dir, args, &[("HOME", &temp_home)]) + } + + fn install(&self) -> tnmsc_local_tests::CommandResult { + self.run(&["install"]) + } + + fn clean(&self) -> tnmsc_local_tests::CommandResult { + self.run(&["clean"]) + } + + fn overwrite_claude_enabled(&self, enabled: bool) { + let workspace_dir = self.project_dir.parent().unwrap_or(&self.project_dir); + write_claude_config(&self.temp_home, workspace_dir, enabled); + } + + fn project_claude_path(&self) -> PathBuf { + self.project_dir.join("CLAUDE.md") + } + + fn child_claude_path(&self) -> PathBuf { + self.project_dir.join(".github").join("CLAUDE.md") + } + + fn global_claude_path(&self) -> PathBuf { + self.temp_home.join(".claude").join("CLAUDE.md") + } +} + +fn write_claude_config(temp_home: &Path, workspace_dir: &Path, enabled: bool) { + fs::write( + temp_home.join(".aindex").join(".tnmsc.json"), + serde_json::json!({ + "workspaceDir": workspace_dir.to_string_lossy(), + "plugins": { + "agentsMd": false, + "git": false, + "readme": false, + "vscode": false, + "zed": false, + "jetbrains": false, + "jetbrainsCodeStyle": false, + "claudeCode": enabled, + "codex": false, + "cursor": false, + "droid": false, + "gemini": false, + "kiro": false, + "opencode": false, + "qoder": false, + "trae": false, + "traeCn": false, + "warp": false, + "windsurf": false + } + }) + .to_string(), + ) + .unwrap(); +} + #[test] fn local_claude_install_generates_project_claude_md() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); + let fixture = IsolatedClaudeFixture::new(); - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); - - let install = runner.install(); - install.assert_success("tnmsc install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before claude install"); + fixture + .install() + .assert_failure("isolated tnmsc install should be blocked by protected root CLAUDE.md"); assert!( - runner.claude_project_file_exists(), - "~/workspace/memory-sync/CLAUDE.md should be generated after install" + fixture.project_claude_path().is_file(), + "project CLAUDE.md should be generated after install" ); - - let content = runner - .read_claude_project_file() - .expect("CLAUDE.md should be readable"); - assert!(!content.is_empty(), "CLAUDE.md should not be empty"); - assert!( - runner.claude_child_file_exists(".github"), - "~/workspace/memory-sync/.github/CLAUDE.md should be generated after install" + fixture.child_claude_path().is_file(), + "child .github/CLAUDE.md should be generated after install" + ); + assert!( + !fs::read_to_string(fixture.project_claude_path()) + .unwrap() + .trim() + .is_empty(), + "project CLAUDE.md should not be empty" ); - - let child_content = runner - .read_claude_child_file(".github") - .expect(".github/CLAUDE.md should be readable"); assert!( - !child_content.is_empty(), - ".github/CLAUDE.md should not be empty" + !fs::read_to_string(fixture.child_claude_path()) + .unwrap() + .trim() + .is_empty(), + "child .github/CLAUDE.md should not be empty" ); } -/// Verify that the generated project CLAUDE.md content exactly matches the aindex -/// source file `app/memory-sync/agt.mdx`. Ensures no content drift. #[test] fn local_claude_project_content_matches_aindex_source() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); + let fixture = IsolatedClaudeFixture::new(); - let install = runner.install(); - install.assert_success("tnmsc install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before claude install"); + fixture + .install() + .assert_failure("isolated tnmsc install should be blocked by protected root CLAUDE.md"); - let aindex_content = runner - .read_aindex_file("app/memory-sync/agt.mdx") - .expect("aindex source agt.mdx should be readable"); - - let generated_content = runner - .read_claude_project_file() - .expect("CLAUDE.md should be readable after install"); + let aindex_content = fs::read_to_string(fixture.aindex_project_dir.join("agt.mdx")).unwrap(); + let generated_content = fs::read_to_string(fixture.project_claude_path()).unwrap(); assert_eq!( aindex_content.trim(), @@ -74,26 +192,20 @@ fn local_claude_project_content_matches_aindex_source() { ); } -/// Verify that the generated .github/CLAUDE.md content exactly matches the aindex -/// source `app/memory-sync/.github/agt.mdx`. #[test] fn local_claude_child_content_matches_aindex_source() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); + let fixture = IsolatedClaudeFixture::new(); - let install = runner.install(); - install.assert_success("tnmsc install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before claude install"); + fixture + .install() + .assert_failure("isolated tnmsc install should be blocked by protected root CLAUDE.md"); - let aindex_child_content = runner - .read_aindex_file("app/memory-sync/.github/agt.mdx") - .expect("aindex source .github/agt.mdx should be readable"); - - let generated_child_content = runner - .read_claude_child_file(".github") - .expect(".github/CLAUDE.md should be readable after install"); + let aindex_child_content = + fs::read_to_string(fixture.aindex_project_dir.join(".github").join("agt.mdx")).unwrap(); + let generated_child_content = fs::read_to_string(fixture.child_claude_path()).unwrap(); assert_eq!( aindex_child_content.trim(), @@ -102,43 +214,37 @@ fn local_claude_child_content_matches_aindex_source() { ); } -/// Verify that `tnmsc clean` removes ALL CLAUDE.md files recursively throughout -/// the project tree, not just the root one. #[test] fn local_claude_clean_removes_all_project_files() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); + let fixture = IsolatedClaudeFixture::new(); - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before claude install"); + fixture + .install() + .assert_failure("isolated tnmsc install before claude clean should hit protected root"); - let install = runner.install(); - install.assert_success("tnmsc install before clean"); + assert!(fixture.project_claude_path().is_file()); - assert!( - runner.claude_project_file_exists(), - "CLAUDE.md should exist after install" - ); - - let clean = runner.clean(); - clean.assert_success("tnmsc clean"); + fixture.clean().assert_success("isolated tnmsc clean"); assert!( - !runner.claude_project_file_exists(), - "CLAUDE.md should be removed after clean" + !fixture.project_claude_path().exists(), + "project CLAUDE.md should be removed after clean" ); - // 递归检查:项目内不应残留任何 CLAUDE.md - fn collect_claude_md_files(dir: &std::path::Path) -> Vec { + fn collect_claude_md_files(dir: &Path) -> Vec { let mut files = Vec::new(); - let Ok(entries) = std::fs::read_dir(dir) else { + let Ok(entries) = fs::read_dir(dir) else { return files; }; for entry in entries.flatten() { let path = entry.path(); - let Ok(ft) = entry.file_type() else { continue }; - if ft.is_dir() { - // 跳过 .git、node_modules、target 等 + let Ok(file_type) = entry.file_type() else { + continue; + }; + if file_type.is_dir() { if let Some(name) = path.file_name() { let name = name.to_string_lossy(); if name.starts_with('.') && name != ".github" @@ -159,47 +265,73 @@ fn local_claude_clean_removes_all_project_files() { files } - let remaining = collect_claude_md_files(runner.cwd()); + let remaining = collect_claude_md_files(&fixture.project_dir); assert!( remaining.is_empty(), - "clean should remove ALL project CLAUDE.md files, found:\n{}", + "clean should remove all project CLAUDE.md files, found:\n{}", remaining .iter() - .map(|p| format!(" - {}", p.display())) + .map(|path| format!(" - {}", path.display())) .collect::>() .join("\n") ); } -/// Verify that the global ~/.claude/CLAUDE.md is generated (it persists independently -/// of project-level clean). #[test] -fn local_claude_global_file_still_generated() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); +fn regression_claude_clean_removes_child_memory_files_even_when_plugin_disabled() { + let fixture = IsolatedClaudeFixture::new(); + + fixture + .clean() + .assert_success("isolated tnmsc clean before claude install"); + fixture + .install() + .assert_failure("isolated tnmsc install before claude clean should hit protected root"); + + assert!(fixture.project_claude_path().is_file()); + assert!(fixture.child_claude_path().is_file()); + + // issue #385: clean must keep deleting generated child CLAUDE.md files even + // after claudeCode is disabled, otherwise stale project memory survives. + fixture.overwrite_claude_enabled(false); + fixture.clean().assert_success( + "isolated tnmsc clean should remove stale claude files even when plugin is disabled", + ); - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); + assert!( + !fixture.project_claude_path().exists(), + "project CLAUDE.md should be removed after clean with plugin disabled" + ); + assert!( + !fixture.child_claude_path().exists(), + "child .github/CLAUDE.md should be removed after clean with plugin disabled" + ); +} - let install = runner.install(); - install.assert_success("tnmsc install"); +#[test] +fn local_claude_global_file_still_generated() { + let fixture = IsolatedClaudeFixture::new(); + + fixture + .clean() + .assert_success("isolated tnmsc clean before claude install"); + fixture + .install() + .assert_failure("isolated tnmsc install should be blocked by protected root CLAUDE.md"); assert!( - runner.claude_global_file_exists(), + fixture.global_claude_path().is_file(), "global ~/.claude/CLAUDE.md should be generated after install" ); - - let content = runner - .read_claude_global_file() - .expect("global CLAUDE.md should be readable"); - assert!(!content.is_empty(), "global CLAUDE.md should not be empty"); + assert!( + !fs::read_to_string(fixture.global_claude_path()) + .unwrap() + .trim() + .is_empty(), + "global CLAUDE.md should not be empty" + ); } -/// Isolated regression test for categorized skills in Claude output. -/// Verifies that: -/// 1. `name` in SKILL.md matches the generated directory name -/// 2. child docs are compiled and emitted as `.md`, not `.mdx` -/// 3. clean removes the generated project tree #[test] fn regression_isolated_claude_skill_name_and_child_doc_extensions() { let runner = LocalTestRunner::new(); @@ -291,15 +423,15 @@ fn regression_isolated_claude_skill_name_and_child_doc_extensions() { let temp_home_str = temp_home.to_string_lossy().into_owned(); - let install = runner.run_at_with_env( - &workspace_dir, - &["install"], - &[("HOME", &temp_home_str)], + let install = runner.run_at_with_env(&workspace_dir, &["install"], &[("HOME", &temp_home_str)]); + install.assert_failure( + "isolated tnmsc install for claude should be blocked by protected root CLAUDE.md", ); - install.assert_failure("isolated tnmsc install for claude should be blocked by protected root CLAUDE.md"); assert!( install.stderr.contains("Refusing to write protected path.") - || install.stderr.contains("CLAUDE.md: Refusing to write protected path."), + || install + .stderr + .contains("CLAUDE.md: Refusing to write protected path."), "expected protected-path failure for root CLAUDE.md, got stderr:\n{}", install.stderr ); diff --git a/cli/local-tests/tests/clean_blackbox.rs b/cli/local-tests/tests/clean_blackbox.rs index f83d5420..9e457b21 100644 --- a/cli/local-tests/tests/clean_blackbox.rs +++ b/cli/local-tests/tests/clean_blackbox.rs @@ -1,243 +1,330 @@ -//! 本地裸机 clean 测试:验证 tnmsc clean 在真实项目上的行为。 +//! Isolated clean black-box tests for `tnmsc clean`. //! -//! **前提**:项目已配置,且 install 后存在生成的文件。 +//! These tests use a temporary HOME/workspace so clean-scope assertions do not +//! depend on the caller's real `~/.aindex/.tnmsc.json` or `~/workspace/*`. -use std::path::PathBuf; +use std::fs; +use std::path::{Path, PathBuf}; use tnmsc_local_tests::LocalTestRunner; -fn workspace_paths() -> (PathBuf, PathBuf, PathBuf, PathBuf) { - let home = tnmsc_local_tests::home_dir(); - let workspace = home.join("workspace"); - ( - home, - workspace.join("memory-sync"), - workspace.join("aindex"), - workspace.join("knowladge"), +struct IsolatedCleanFixture { + runner: LocalTestRunner, + temp_home: PathBuf, + home_dir: PathBuf, + memory_sync_dir: PathBuf, + aindex_dir: PathBuf, + knowladge_dir: PathBuf, +} + +impl IsolatedCleanFixture { + fn new(claude_enabled: bool, agents_enabled: bool) -> Self { + let temp_root = std::env::temp_dir().join(format!( + "tnmsc-local-clean-{}-{}", + std::process::id(), + std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap_or_default() + .as_nanos() + )); + let home_dir = temp_root.join("home"); + let workspace_dir = temp_root.join("workspace"); + let memory_sync_dir = workspace_dir.join("memory-sync"); + let aindex_dir = workspace_dir.join("aindex"); + let knowladge_dir = workspace_dir.join("knowladge"); + let aindex_project_dir = aindex_dir.join("app").join("memory-sync"); + + fs::create_dir_all(home_dir.join(".aindex")).unwrap(); + fs::create_dir_all(memory_sync_dir.join(".github")).unwrap(); + fs::create_dir_all(&knowladge_dir).unwrap(); + fs::create_dir_all(aindex_project_dir.join(".github")).unwrap(); + + // issue local-tests-clean-isolation: clean black-box tests must own their + // workspace fixture so scope assertions do not depend on the host machine. + write_config(&home_dir, &workspace_dir, claude_enabled, agents_enabled); + write_prompt_sources(&aindex_dir, &aindex_project_dir); + + Self { + runner: LocalTestRunner::with_cwd(&memory_sync_dir), + temp_home: home_dir.clone(), + home_dir, + memory_sync_dir, + aindex_dir, + knowladge_dir, + } + } + + fn env_home(&self) -> String { + self.temp_home.to_string_lossy().into_owned() + } + + fn run_at(&self, cwd: &Path, args: &[&str]) -> tnmsc_local_tests::CommandResult { + let temp_home = self.env_home(); + self + .runner + .run_at_with_env(cwd, args, &[("HOME", &temp_home)]) + } + + fn install(&self) -> tnmsc_local_tests::CommandResult { + self.run_at(&self.memory_sync_dir, &["install"]) + } + + fn clean_at(&self, cwd: &Path) -> tnmsc_local_tests::CommandResult { + self.run_at(cwd, &["clean"]) + } + + fn dry_run_at(&self, cwd: &Path) -> tnmsc_local_tests::CommandResult { + self.run_at(cwd, &["clean", "--dry-run"]) + } + + fn project_claude_path(&self) -> PathBuf { + self.memory_sync_dir.join("CLAUDE.md") + } + + fn project_agents_path(&self) -> PathBuf { + self.memory_sync_dir.join("AGENTS.md") + } + + fn knowladge_agents_path(&self) -> PathBuf { + self.knowladge_dir.join("AGENTS.md") + } + + fn aindex_agents_path(&self) -> PathBuf { + self.aindex_dir.join("AGENTS.md") + } +} + +fn write_prompt_sources(aindex_dir: &Path, aindex_project_dir: &Path) { + fs::write( + aindex_dir.join("global.mdx"), + "# Global memory\n\nGlobal instructions\n", + ) + .unwrap(); + fs::write( + aindex_dir.join("workspace.mdx"), + "# Workspace memory\n\nWorkspace instructions\n", + ) + .unwrap(); + fs::write( + aindex_dir.join("workspace.src.mdx"), + "# Workspace memory\n\nWorkspace instructions\n", + ) + .unwrap(); + fs::write( + aindex_project_dir.join("agt.mdx"), + "# Project root\n\nProject root instructions\n", + ) + .unwrap(); + fs::write( + aindex_project_dir.join(".github").join("agt.mdx"), + "# Child root\n\nChild instructions\n", + ) + .unwrap(); +} + +fn write_config(home_dir: &Path, workspace_dir: &Path, claude_enabled: bool, agents_enabled: bool) { + fs::write( + home_dir.join(".aindex").join(".tnmsc.json"), + serde_json::json!({ + "workspaceDir": workspace_dir.to_string_lossy(), + "plugins": { + "agentsMd": agents_enabled, + "git": false, + "readme": false, + "vscode": false, + "zed": false, + "jetbrains": false, + "jetbrainsCodeStyle": false, + "claudeCode": claude_enabled, + "codex": false, + "cursor": false, + "droid": false, + "gemini": false, + "kiro": false, + "opencode": false, + "qoder": false, + "trae": false, + "traeCn": false, + "warp": false, + "windsurf": false + } + }) + .to_string(), ) + .unwrap(); } /// Verify the basic clean lifecycle: install creates CLAUDE.md, clean removes it. #[test] fn local_clean_removes_project_claude_md() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); + let fixture = IsolatedCleanFixture::new(true, false); - // 先 clean 再 install 确保可复现 - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); + fixture + .clean_at(&fixture.memory_sync_dir) + .assert_success("isolated tnmsc clean before install"); - // 先 install 生成文件 - let install = runner.install(); - install.assert_success("tnmsc install before clean"); + let install = fixture.install(); + install.assert_failure("isolated tnmsc install should be blocked by protected root CLAUDE.md"); assert!( - runner.file_exists("CLAUDE.md"), - "~/workspace/memory-sync/CLAUDE.md should exist after install" + fixture.project_claude_path().is_file(), + "project CLAUDE.md should exist after install" ); - // clean 删除生成的文件 - let clean = runner.clean(); - clean.assert_success("tnmsc clean"); + fixture + .clean_at(&fixture.memory_sync_dir) + .assert_success("isolated tnmsc clean"); assert!( - !runner.file_exists("CLAUDE.md"), - "~/workspace/memory-sync/CLAUDE.md should be removed after clean" + !fixture.project_claude_path().exists(), + "project CLAUDE.md should be removed after clean" ); } /// Verify that `tnmsc clean --dry-run` does NOT delete files — it only previews what would be cleaned. #[test] fn local_clean_dry_run_does_not_remove_files() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); + let fixture = IsolatedCleanFixture::new(true, false); - // 先 clean 再 install 确保可复现 - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); + fixture + .clean_at(&fixture.memory_sync_dir) + .assert_success("isolated tnmsc clean before install"); - // 先 install 生成文件 - let install = runner.install(); - install.assert_success("tnmsc install before dry-run clean"); - assert!(runner.file_exists("CLAUDE.md")); + let install = fixture.install(); + install.assert_failure("isolated tnmsc install should be blocked by protected root CLAUDE.md"); + assert!(fixture.project_claude_path().is_file()); - // dry-run clean 不应删除文件 - let dry_clean = runner.run(&["clean", "--dry-run"]); - dry_clean.assert_success("tnmsc clean --dry-run"); + fixture + .dry_run_at(&fixture.memory_sync_dir) + .assert_success("isolated tnmsc clean --dry-run"); assert!( - runner.file_exists("CLAUDE.md"), - "CLAUDE.md should still exist after dry-run clean" + fixture.project_claude_path().is_file(), + "project CLAUDE.md should still exist after dry-run clean" ); } -/// Verify that running `tnmsc clean` inside ~/workspace/memory-sync/ only cleans -/// that project, not sibling projects like aindex or knowladge. +/// Verify that running `tnmsc clean` inside memory-sync only cleans that project. #[test] fn local_clean_from_memory_sync_does_not_clean_other_projects() { - let (home, memory_sync, aindex, knowladge) = workspace_paths(); - - // 使用单个 runner,通过 run_at 在不同目录执行命令,保持锁不释放 - let runner = LocalTestRunner::with_cwd(&memory_sync); - runner.assert_project_ready(); + let fixture = IsolatedCleanFixture::new(false, true); - // 从 home 全局清理,确保干净状态 - runner - .run_at(&home, &["clean"]) - .assert_success("clean from home before test"); + fixture + .clean_at(&fixture.home_dir) + .assert_success("isolated clean from home before scoped clean"); - // install 生成文件 - runner.install().assert_success("install before clean"); + fixture.install().assert_failure( + "isolated tnmsc install before scoped clean should hit protected workspace AGENTS.md", + ); - // 手动创建 aindex/AGENTS.md(aindex 不是 project root,install 不会生成) - std::fs::write(aindex.join("AGENTS.md"), "# Test AGENTS.md\n") - .expect("should write aindex AGENTS.md"); + // issue local-tests-clean-scope: keep one manually managed sibling file so + // we verify scope filtering on arbitrary workspace files, not just install outputs. + fs::write(fixture.aindex_agents_path(), "# Test AGENTS.md\n").unwrap(); + fs::write(fixture.knowladge_agents_path(), "# Test AGENTS.md\n").unwrap(); - // 验证所有文件都存在 assert!( - runner.file_exists("AGENTS.md"), + fixture.project_agents_path().is_file(), "memory-sync/AGENTS.md should exist after install" ); assert!( - runner.file_exists_at(knowladge.join("AGENTS.md")), + fixture.knowladge_agents_path().is_file(), "knowladge/AGENTS.md should exist after install" ); assert!( - runner.file_exists_at(aindex.join("AGENTS.md")), + fixture.aindex_agents_path().is_file(), "aindex/AGENTS.md should exist after manual create" ); - // 从 memory-sync 执行 clean - runner - .clean() - .assert_success("tnmsc clean from memory-sync"); + fixture + .clean_at(&fixture.memory_sync_dir) + .assert_success("isolated tnmsc clean from memory-sync"); - // memory-sync 的 AGENTS.md 应该被清理 assert!( - !runner.file_exists("AGENTS.md"), + !fixture.project_agents_path().exists(), "memory-sync/AGENTS.md should be removed after scoped clean" ); - - // 其他项目的 AGENTS.md 应该保留 assert!( - runner.file_exists_at(knowladge.join("AGENTS.md")), + fixture.knowladge_agents_path().is_file(), "knowladge/AGENTS.md should still exist after scoped clean" ); assert!( - runner.file_exists_at(aindex.join("AGENTS.md")), + fixture.aindex_agents_path().is_file(), "aindex/AGENTS.md should still exist after scoped clean" ); } -/// Verify the reverse: running clean inside ~/workspace/aindex/ does not affect -/// memory-sync's generated files. +/// Verify the reverse: running clean inside aindex does not affect memory-sync outputs. +/// +/// The prompt-source root is reserved workspace state, so this scoped clean is a +/// no-op for the manually created root-level `aindex/AGENTS.md`. #[test] fn local_clean_from_aindex_does_not_clean_memory_sync() { - let (home, memory_sync, aindex, knowladge) = workspace_paths(); - - // 使用单个 runner,通过 run_at 在不同目录执行命令 - let runner = LocalTestRunner::with_cwd(&memory_sync); - runner.assert_project_ready(); + let fixture = IsolatedCleanFixture::new(false, true); - // 从 home 全局清理,确保干净状态 - runner - .run_at(&home, &["clean"]) - .assert_success("clean from home before test"); + fixture + .clean_at(&fixture.home_dir) + .assert_success("isolated clean from home before scoped clean"); - // install 生成文件 - runner.install().assert_success("install before clean"); - - // 手动创建 aindex/AGENTS.md - std::fs::write(aindex.join("AGENTS.md"), "# Test AGENTS.md\n") - .expect("should write aindex AGENTS.md"); - - // 验证文件存在 - assert!( - runner.file_exists("AGENTS.md"), - "memory-sync/AGENTS.md should exist after install" - ); - assert!( - runner.file_exists_at(knowladge.join("AGENTS.md")), - "knowladge/AGENTS.md should exist after install" - ); - assert!( - runner.file_exists_at(aindex.join("AGENTS.md")), - "aindex/AGENTS.md should exist after manual create" + fixture.install().assert_failure( + "isolated tnmsc install before scoped clean should hit protected workspace AGENTS.md", ); + fs::write(fixture.aindex_agents_path(), "# Test AGENTS.md\n").unwrap(); + fs::write(fixture.knowladge_agents_path(), "# Test AGENTS.md\n").unwrap(); - // 从 aindex 执行 clean - runner - .run_at(&aindex, &["clean"]) - .assert_success("tnmsc clean from aindex"); + assert!(fixture.project_agents_path().is_file()); + assert!(fixture.knowladge_agents_path().is_file()); + assert!(fixture.aindex_agents_path().is_file()); + + fixture + .clean_at(&fixture.aindex_dir) + .assert_success("isolated tnmsc clean from aindex"); - // aindex 的 AGENTS.md 应该被清理(在作用域内) assert!( - !runner.file_exists_at(aindex.join("AGENTS.md")), - "aindex/AGENTS.md should be removed after scoped clean" + fixture.aindex_agents_path().is_file(), + "aindex/AGENTS.md should remain after scoped clean from reserved aindex root" ); - - // memory-sync 和 knowladge 的 AGENTS.md 应该保留 assert!( - runner.file_exists("AGENTS.md"), + fixture.project_agents_path().is_file(), "memory-sync/AGENTS.md should still exist after scoped clean from aindex" ); assert!( - runner.file_exists_at(knowladge.join("AGENTS.md")), + fixture.knowladge_agents_path().is_file(), "knowladge/AGENTS.md should still exist after scoped clean from aindex" ); } -/// Verify that running clean from ~/ (which is above all workspace projects) cleans -/// all projects under the workspace directory. +/// Verify that running clean from HOME cleans all projects under the workspace. #[test] fn local_clean_from_home_cleans_all_projects() { - let (home, memory_sync, aindex, knowladge) = workspace_paths(); - - // 使用单个 runner,通过 run_at 在不同目录执行命令 - let runner = LocalTestRunner::with_cwd(&memory_sync); - runner.assert_project_ready(); - - // 从 home 全局清理,确保干净状态 - runner - .run_at(&home, &["clean"]) - .assert_success("clean from home before test"); - - // install 生成文件 - runner.install().assert_success("install before clean"); + let fixture = IsolatedCleanFixture::new(false, true); - // 手动创建 aindex/AGENTS.md - std::fs::write(aindex.join("AGENTS.md"), "# Test AGENTS.md\n") - .expect("should write aindex AGENTS.md"); + fixture + .clean_at(&fixture.home_dir) + .assert_success("isolated clean from home before global clean"); - // 验证所有文件都存在 - assert!( - runner.file_exists("AGENTS.md"), - "memory-sync/AGENTS.md should exist after install" - ); - assert!( - runner.file_exists_at(knowladge.join("AGENTS.md")), - "knowladge/AGENTS.md should exist after install" - ); - assert!( - runner.file_exists_at(aindex.join("AGENTS.md")), - "aindex/AGENTS.md should exist after manual create" + fixture.install().assert_failure( + "isolated tnmsc install before global clean should hit protected workspace AGENTS.md", ); + fs::write(fixture.aindex_agents_path(), "# Test AGENTS.md\n").unwrap(); + fs::write(fixture.knowladge_agents_path(), "# Test AGENTS.md\n").unwrap(); + + assert!(fixture.project_agents_path().is_file()); + assert!(fixture.knowladge_agents_path().is_file()); + assert!(fixture.aindex_agents_path().is_file()); - // 从 home 执行 clean(不在 workspace 子项目内,应清理全部) - runner - .run_at(&home, &["clean"]) - .assert_success("tnmsc clean from home"); + fixture + .clean_at(&fixture.home_dir) + .assert_success("isolated tnmsc clean from home"); - // 所有项目的 AGENTS.md 都应该被清理 assert!( - !runner.file_exists("AGENTS.md"), + !fixture.project_agents_path().exists(), "memory-sync/AGENTS.md should be removed after global clean" ); assert!( - !runner.file_exists_at(knowladge.join("AGENTS.md")), + !fixture.knowladge_agents_path().exists(), "knowladge/AGENTS.md should be removed after global clean" ); assert!( - !runner.file_exists_at(aindex.join("AGENTS.md")), + !fixture.aindex_agents_path().exists(), "aindex/AGENTS.md should be removed after global clean" ); } diff --git a/cli/local-tests/tests/codex_smoke.rs b/cli/local-tests/tests/codex_smoke.rs index 675c0180..c4d07f56 100644 --- a/cli/local-tests/tests/codex_smoke.rs +++ b/cli/local-tests/tests/codex_smoke.rs @@ -1,25 +1,255 @@ -//! 本地裸机 codex 测试:验证 CodexCLIOutputAdaptor 生成的 codex 文件。 +//! Isolated codex smoke tests for CodexCLIOutputAdaptor. //! -//! **前提**:项目已配置,codex 插件已启用(plugins.codex = true)。 +//! These tests use a temporary HOME/workspace fixture so codex output checks do +//! not depend on the caller's real `~/.aindex/.tnmsc.json` or `~/.codex`. + +use std::collections::HashSet; +use std::fs; +use std::path::{Path, PathBuf}; use tnmsc_local_tests::LocalTestRunner; -fn assert_codex_plugin_enabled() { - let config_path = dirs::home_dir() - .expect("should have home directory") - .join(".aindex") - .join(".tnmsc.json"); - let raw = std::fs::read_to_string(&config_path).expect("~/.aindex/.tnmsc.json should exist"); - let parsed: serde_json::Value = serde_json::from_str(&raw).expect("should be valid JSON"); - let codex_enabled = parsed - .get("plugins") - .and_then(|p| p.get("codex")) - .and_then(|v| v.as_bool()) - .unwrap_or(false); - assert!( - codex_enabled, - "plugins.codex must be set to true in ~/.aindex/.tnmsc.json" - ); +struct IsolatedCodexFixture { + runner: LocalTestRunner, + temp_home: PathBuf, + project_dir: PathBuf, + aindex_dir: PathBuf, +} + +impl IsolatedCodexFixture { + fn new() -> Self { + let temp_root = std::env::temp_dir().join(format!( + "tnmsc-local-codex-{}-{}", + std::process::id(), + std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap_or_default() + .as_nanos() + )); + let temp_home = temp_root.join("home"); + let workspace_dir = temp_root.join("workspace"); + let project_dir = workspace_dir.join("memory-sync"); + let aindex_dir = workspace_dir.join("aindex"); + let aindex_project_dir = aindex_dir.join("app").join("memory-sync"); + + fs::create_dir_all(temp_home.join(".aindex")).unwrap(); + fs::create_dir_all(project_dir.join(".github")).unwrap(); + fs::create_dir_all(aindex_project_dir.join(".github")).unwrap(); + fs::create_dir_all(aindex_dir.join("commands")).unwrap(); + fs::create_dir_all(aindex_dir.join("subagents").join("qa")).unwrap(); + fs::create_dir_all( + aindex_dir + .join("skills") + .join("browser") + .join("agent-browser"), + ) + .unwrap(); + fs::create_dir_all(aindex_dir.join("skills").join("plain-skill")).unwrap(); + + // issue local-tests-codex-isolation: codex smoke tests must not depend on + // the host ~/.codex or host workspace prompt inventory. + write_codex_config(&temp_home, &workspace_dir); + write_codex_prompt_sources(&aindex_dir, &aindex_project_dir); + + Self { + runner: LocalTestRunner::with_cwd(&project_dir), + temp_home, + project_dir, + aindex_dir, + } + } + + fn env_home(&self) -> String { + self.temp_home.to_string_lossy().into_owned() + } + + fn run(&self, args: &[&str]) -> tnmsc_local_tests::CommandResult { + let temp_home = self.env_home(); + self + .runner + .run_at_with_env(&self.project_dir, args, &[("HOME", &temp_home)]) + } + + fn install(&self) -> tnmsc_local_tests::CommandResult { + self.run(&["install"]) + } + + fn clean(&self) -> tnmsc_local_tests::CommandResult { + self.run(&["clean"]) + } + + fn dry_run(&self) -> tnmsc_local_tests::CommandResult { + self.run(&["dry-run"]) + } + + fn global_codex_dir(&self) -> PathBuf { + self.temp_home.join(".codex") + } + + fn global_agents_path(&self) -> PathBuf { + self.global_codex_dir().join("AGENTS.md") + } + + fn global_prompts_dir(&self) -> PathBuf { + self.global_codex_dir().join("prompts") + } + + fn global_agents_dir(&self) -> PathBuf { + self.global_codex_dir().join("agents") + } + + fn project_codex_dir(&self) -> PathBuf { + self.project_dir.join(".codex") + } + + fn project_agents_dir(&self) -> PathBuf { + self.project_codex_dir().join("agents") + } + + fn project_skills_dir(&self) -> PathBuf { + self.project_codex_dir().join("skills") + } +} + +fn write_codex_config(temp_home: &Path, workspace_dir: &Path) { + fs::write( + temp_home.join(".aindex").join(".tnmsc.json"), + serde_json::json!({ + "workspaceDir": workspace_dir.to_string_lossy(), + "plugins": { + "agentsMd": false, + "git": false, + "readme": false, + "vscode": false, + "zed": false, + "jetbrains": false, + "jetbrainsCodeStyle": false, + "claudeCode": false, + "codex": true, + "cursor": false, + "droid": false, + "gemini": false, + "kiro": false, + "opencode": false, + "qoder": false, + "trae": false, + "traeCn": false, + "warp": false, + "windsurf": false + } + }) + .to_string(), + ) + .unwrap(); +} + +fn write_codex_prompt_sources(aindex_dir: &Path, aindex_project_dir: &Path) { + fs::write( + aindex_dir.join("global.mdx"), + "---\ndescription: global memory\n---\nGlobal codex memory\n", + ) + .unwrap(); + fs::write( + aindex_dir.join("workspace.mdx"), + "---\ndescription: workspace memory\n---\nWorkspace codex memory\n", + ) + .unwrap(); + fs::write( + aindex_dir.join("workspace.src.mdx"), + "---\ndescription: workspace memory\n---\nWorkspace codex memory\n", + ) + .unwrap(); + fs::write( + aindex_project_dir.join("agt.mdx"), + "# Project codex memory\n\nProject root instructions\n", + ) + .unwrap(); + fs::write( + aindex_project_dir.join(".github").join("agt.mdx"), + "# Child codex memory\n\nChild instructions\n", + ) + .unwrap(); + + fs::write( + aindex_dir.join("commands").join("demo.mdx"), + "---\ndescription: Demo command\nargumentHint: target\nscope: global\n---\nRun demo command\n", + ) + .unwrap(); + fs::write( + aindex_dir.join("commands").join("qa_boot.mdx"), + "---\ndescription: QA boot\nargumentHint: repo\nscope: global\n---\nRun QA boot\n", + ) + .unwrap(); + + fs::write( + aindex_dir.join("subagents").join("demo.mdx"), + "---\ndescription: Demo agent\nscope: global\n---\nDemo agent instructions\n", + ) + .unwrap(); + fs::write( + aindex_dir.join("subagents").join("qa").join("boot.mdx"), + "---\ndescription: QA boot agent\nscope: global\n---\nQA boot instructions\n", + ) + .unwrap(); + + let browser_skill_dir = aindex_dir + .join("skills") + .join("browser") + .join("agent-browser"); + fs::create_dir_all(browser_skill_dir.join("references")).unwrap(); + fs::create_dir_all(browser_skill_dir.join("templates")).unwrap(); + fs::create_dir_all(browser_skill_dir.join("assets")).unwrap(); + fs::write( + browser_skill_dir.join("skill.mdx"), + "export default { description: 'Browser skill', name: 'Browser Agent Browser' }\n\n# Browser Skill\n", + ) + .unwrap(); + fs::write( + browser_skill_dir.join("skill.src.mdx"), + "export default { description: 'Browser skill', name: 'Browser Agent Browser' }\n\n# Browser Skill\n", + ) + .unwrap(); + fs::write( + browser_skill_dir.join("references").join("linux-wsl.mdx"), + "---\ndescription: Linux WSL reference\n---\n# Linux WSL\n", + ) + .unwrap(); + fs::write( + browser_skill_dir + .join("references") + .join("linux-wsl.src.mdx"), + "---\ndescription: Linux WSL reference\n---\n# Linux WSL\n", + ) + .unwrap(); + fs::write( + browser_skill_dir + .join("templates") + .join("capture-workflow.sh"), + "#!/usr/bin/env bash\necho capture\n", + ) + .unwrap(); + fs::write( + browser_skill_dir.join("assets").join("logo.png"), + [0x89_u8, 0x50, 0x4E, 0x47, 0x00, 0xFF], + ) + .unwrap(); + fs::write( + browser_skill_dir.join("mcp.json"), + "{\n \"mcpServers\": {\n \"browser\": { \"command\": \"agent-browser\" }\n }\n}\n", + ) + .unwrap(); + + let plain_skill_dir = aindex_dir.join("skills").join("plain-skill"); + fs::write( + plain_skill_dir.join("skill.mdx"), + "export default { description: 'Plain skill' }\n\n# Plain Skill\n", + ) + .unwrap(); + fs::write( + plain_skill_dir.join("skill.src.mdx"), + "export default { description: 'Plain skill' }\n\n# Plain Skill\n", + ) + .unwrap(); } fn expected_installed_skill_names( @@ -28,7 +258,11 @@ fn expected_installed_skill_names( let mut names = std::collections::HashSet::new(); for entry in std::fs::read_dir(aindex_skills_dir).unwrap().flatten() { - if !entry.file_type().map(|file_type| file_type.is_dir()).unwrap_or(false) { + if !entry + .file_type() + .map(|file_type| file_type.is_dir()) + .unwrap_or(false) + { continue; } @@ -59,56 +293,58 @@ fn expected_installed_skill_names( names } -/// Verify that install generates the global ~/.codex/AGENTS.md with non-empty content. +fn collect_file_names(dir: &Path, suffix: &str) -> HashSet { + fs::read_dir(dir) + .unwrap() + .flatten() + .filter(|entry| { + entry + .file_type() + .map(|file_type| file_type.is_file()) + .unwrap_or(false) + && entry.file_name().to_string_lossy().ends_with(suffix) + }) + .map(|entry| entry.file_name().to_string_lossy().to_string()) + .collect() +} + #[test] fn local_codex_install_generates_global_agents_md() { - assert_codex_plugin_enabled(); - - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); + let fixture = IsolatedCodexFixture::new(); - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); - - let install = runner.install(); - install.assert_success("tnmsc install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before codex install"); + fixture + .install() + .assert_failure("isolated tnmsc install should surface protected workspace AGENTS.md"); assert!( - runner.codex_global_file_exists(), + fixture.global_agents_path().is_file(), "~/.codex/AGENTS.md should be generated after install" ); - - let content = runner - .read_codex_global_file() - .expect("~/.codex/AGENTS.md should be readable"); assert!( - !content.is_empty(), + !fs::read_to_string(fixture.global_agents_path()) + .unwrap() + .trim() + .is_empty(), "~/.codex/AGENTS.md should not be empty" ); } -/// Verify that the global ~/.codex/AGENTS.md content exactly matches the aindex -/// `global.mdx` source. #[test] fn local_codex_global_agents_md_matches_aindex_source() { - assert_codex_plugin_enabled(); - - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); + let fixture = IsolatedCodexFixture::new(); - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before codex install"); + fixture + .install() + .assert_failure("isolated tnmsc install should surface protected workspace AGENTS.md"); - let install = runner.install(); - install.assert_success("tnmsc install"); - - let aindex_global = runner - .read_aindex_file("global.mdx") - .expect("aindex global.mdx should be readable"); - - let codex_global = runner - .read_codex_global_file() - .expect("~/.codex/AGENTS.md should be readable after install"); + let aindex_global = fs::read_to_string(fixture.aindex_dir.join("global.mdx")).unwrap(); + let codex_global = fs::read_to_string(fixture.global_agents_path()).unwrap(); assert_eq!( aindex_global.trim(), @@ -117,54 +353,38 @@ fn local_codex_global_agents_md_matches_aindex_source() { ); } -/// Verify that install creates the ~/.codex/prompts/ directory. #[test] fn local_codex_install_generates_global_prompts_dir() { - assert_codex_plugin_enabled(); - - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); + let fixture = IsolatedCodexFixture::new(); - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); - - let install = runner.install(); - install.assert_success("tnmsc install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before codex install"); + fixture + .install() + .assert_failure("isolated tnmsc install should surface protected workspace AGENTS.md"); assert!( - runner.codex_global_prompts_dir_exists(), + fixture.global_prompts_dir().is_dir(), "~/.codex/prompts/ should be generated after install" ); } -/// Verify that prompt files in ~/.codex/prompts/ are all .md files with correct format -/// (kebab-case fields like argument-hint, not camelCase argumentHint). #[test] fn local_codex_prompts_match_aindex_commands() { - assert_codex_plugin_enabled(); - - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); + let fixture = IsolatedCodexFixture::new(); - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before codex install"); + fixture + .install() + .assert_failure("isolated tnmsc install should surface protected workspace AGENTS.md"); - let install = runner.install(); - install.assert_success("tnmsc install"); - - assert!( - runner.codex_global_prompts_dir_exists(), - "~/.codex/prompts/ should exist after install" - ); - - let prompts_dir = dirs::home_dir() - .expect("should have home directory") - .join(".codex") - .join("prompts"); - let prompt_files: Vec<_> = std::fs::read_dir(&prompts_dir) + let prompt_files: Vec<_> = fs::read_dir(fixture.global_prompts_dir()) .unwrap() .flatten() - .filter(|e| e.file_type().map(|ft| ft.is_file()).unwrap_or(false)) + .filter(|entry| entry.file_type().map(|ft| ft.is_file()).unwrap_or(false)) .collect(); assert!( @@ -172,7 +392,19 @@ fn local_codex_prompts_match_aindex_commands() { "~/.codex/prompts/ should contain at least one file" ); - // Verify all files are .md with correct codex prompts format + let prompt_names: HashSet = prompt_files + .iter() + .map(|entry| entry.file_name().to_string_lossy().to_string()) + .collect(); + assert!( + prompt_names.contains("demo.md"), + "codex prompts should include demo.md" + ); + assert!( + prompt_names.contains("qa-boot.md"), + "codex prompts should include qa-boot.md" + ); + for file in &prompt_files { let name = file.file_name(); let name_str = name.to_string_lossy(); @@ -181,50 +413,32 @@ fn local_codex_prompts_match_aindex_commands() { "every file in ~/.codex/prompts must be .md, got: {}", name_str ); - let content = std::fs::read_to_string(file.path()).unwrap(); - - // If file has front matter, validate it - if content.starts_with("---\n") { - // Codex prompts use kebab-case for field names (e.g., argument-hint, not argumentHint) - if content.contains("argument") { - assert!( - !content.contains("argumentHint:"), - "prompt file {} should use 'argument-hint' (kebab-case), not 'argumentHint' (camelCase)", - name_str - ); - } + let content = fs::read_to_string(file.path()).unwrap(); + if content.contains("argument") { + assert!( + !content.contains("argumentHint:"), + "prompt file {} should use 'argument-hint', not 'argumentHint'", + name_str + ); } } } -/// Verify that codex prompt files do NOT contain a `command:` field (compatibility issue) -/// and that all YAML values are enclosed in double quotes. #[test] fn local_codex_prompts_no_command_field_and_quoted_values() { - assert_codex_plugin_enabled(); - - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); - - let install = runner.install(); - install.assert_success("tnmsc install"); + let fixture = IsolatedCodexFixture::new(); - assert!( - runner.codex_global_prompts_dir_exists(), - "~/.codex/prompts/ should exist after install" - ); + fixture + .clean() + .assert_success("isolated tnmsc clean before codex install"); + fixture + .install() + .assert_failure("isolated tnmsc install should surface protected workspace AGENTS.md"); - let prompts_dir = dirs::home_dir() - .expect("should have home directory") - .join(".codex") - .join("prompts"); - let prompt_files: Vec<_> = std::fs::read_dir(&prompts_dir) + let prompt_files: Vec<_> = fs::read_dir(fixture.global_prompts_dir()) .unwrap() .flatten() - .filter(|e| e.file_type().map(|ft| ft.is_file()).unwrap_or(false)) + .filter(|entry| entry.file_type().map(|ft| ft.is_file()).unwrap_or(false)) .collect(); assert!( @@ -233,45 +447,32 @@ fn local_codex_prompts_no_command_field_and_quoted_values() { ); for file in &prompt_files { - let name = file.file_name(); - let name_str = name.to_string_lossy(); - let content = std::fs::read_to_string(file.path()).unwrap(); - - // Extract front matter between --- markers + let name_str = file.file_name().to_string_lossy().to_string(); + let content = fs::read_to_string(file.path()).unwrap(); let fm_end = content.find("\n---\n").unwrap_or(content.len()); let front_matter = &content[..fm_end]; - // 1. Codex prompts must NOT contain "command" field (compatibility issue) assert!( !front_matter.contains("command:"), - "prompt file {} must NOT contain 'command:' field (codex compatibility issue), got:\n{}", - name_str, - front_matter + "prompt file {} must NOT contain 'command:' field", + name_str ); - // 2. All YAML field values must be enclosed in double quotes - // In codex prompts, only "description" and "argument-hint" fields are valid for line in front_matter.lines() { if let Some(pos) = line.find(": ") { let key = &line[..pos]; let value = &line[pos + 2..]; - - // Only check known codex prompt fields let key_trimmed = key.trim(); if key_trimmed != "description" && key_trimmed != "argument-hint" { continue; } - - // Skip empty values if value.trim().is_empty() { continue; } - - // Check that value is enclosed in double quotes let trimmed = value.trim(); assert!( trimmed.starts_with('"') && trimmed.ends_with('"'), - "prompt file {} has unquoted value '{}' in line '{}' (all codex prompt values must be quoted)", + "prompt file {} has unquoted value '{}' in line '{}'", name_str, value, line @@ -281,136 +482,81 @@ fn local_codex_prompts_no_command_field_and_quoted_values() { } } -/// Verify that install creates the project-level .codex/ directory. #[test] fn local_codex_install_generates_project_codex_dir() { - assert_codex_plugin_enabled(); + let fixture = IsolatedCodexFixture::new(); - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); - - let install = runner.install(); - install.assert_success("tnmsc install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before codex install"); + fixture + .install() + .assert_failure("isolated tnmsc install should surface protected workspace AGENTS.md"); assert!( - runner.codex_project_dir_exists(), - "~/workspace/memory-sync/.codex/ should be generated after install" + fixture.project_codex_dir().is_dir(), + "project .codex/ should be generated after install" ); } -/// Verify that the project .codex/skills/ directory names exactly match the transformed -/// aindex/skills/ names (same count, same names). #[test] fn local_codex_project_skills_match_aindex_skills() { - assert_codex_plugin_enabled(); - - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); + let fixture = IsolatedCodexFixture::new(); - let install = runner.install(); - install.assert_success("tnmsc install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before codex install"); + fixture + .install() + .assert_failure("isolated tnmsc install should surface protected workspace AGENTS.md"); assert!( - runner.codex_project_skills_dir_exists(), - "~/workspace/memory-sync/.codex/skills/ should exist after install" + fixture.project_skills_dir().is_dir(), + "project .codex/skills/ should exist after install" ); - // Count aindex skills - let aindex_dir = runner - .resolve_aindex_dir() - .expect("aindex dir should exist"); - let aindex_skills_dir = aindex_dir.join("skills"); - let project_skills_dir = runner.cwd().join(".codex").join("skills"); + let aindex_skills_dir = fixture.aindex_dir.join("skills"); let expected_names = expected_installed_skill_names(&aindex_skills_dir); - let project_names: std::collections::HashSet = std::fs::read_dir(&project_skills_dir) + let project_names: HashSet = fs::read_dir(fixture.project_skills_dir()) .unwrap() .flatten() - .filter(|entry| entry.file_type().map(|file_type| file_type.is_dir()).unwrap_or(false)) + .filter(|entry| { + entry + .file_type() + .map(|file_type| file_type.is_dir()) + .unwrap_or(false) + }) .map(|entry| entry.file_name().to_string_lossy().to_string()) .collect(); - assert_eq!( - expected_names.len(), - project_names.len(), - "project .codex/skills should have same count as aindex/skills" - ); - assert_eq!( expected_names, project_names, "project .codex/skills directory names should match transformed aindex/skills names" ); } -/// Verify that global ~/.codex/agents/*.toml files are also present in the project -/// .codex/agents/ directory with matching filenames. #[test] fn local_codex_global_agents_copied_to_project() { - assert_codex_plugin_enabled(); - - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); + let fixture = IsolatedCodexFixture::new(); - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); - - let install = runner.install(); - install.assert_success("tnmsc install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before codex install"); + fixture + .install() + .assert_failure("isolated tnmsc install should surface protected workspace AGENTS.md"); assert!( - runner.codex_global_agents_dir_exists(), + fixture.global_agents_dir().is_dir(), "~/.codex/agents/ should exist after install" ); - assert!( - runner.codex_project_agents_dir_exists(), - "~/workspace/memory-sync/.codex/agents/ should exist after install" + fixture.project_agents_dir().is_dir(), + "project .codex/agents/ should exist after install" ); - // Compare global and project agents - let global_agents_dir = dirs::home_dir() - .expect("should have home directory") - .join(".codex") - .join("agents"); - let project_agents_dir = runner.cwd().join(".codex").join("agents"); - - let global_agent_files: Vec<_> = std::fs::read_dir(&global_agents_dir) - .unwrap() - .flatten() - .filter(|e| { - e.file_type().map(|ft| ft.is_file()).unwrap_or(false) - && e.file_name().to_string_lossy().ends_with(".toml") - }) - .collect(); - - let project_agent_files: Vec<_> = std::fs::read_dir(&project_agents_dir) - .unwrap() - .flatten() - .filter(|e| { - e.file_type().map(|ft| ft.is_file()).unwrap_or(false) - && e.file_name().to_string_lossy().ends_with(".toml") - }) - .collect(); - - assert_eq!( - global_agent_files.len(), - project_agent_files.len(), - "project .codex/agents should have same count as global ~/.codex/agents" - ); - - let global_names: std::collections::HashSet = global_agent_files - .iter() - .map(|e| e.file_name().to_string_lossy().to_string()) - .collect(); - let project_names: std::collections::HashSet = project_agent_files - .iter() - .map(|e| e.file_name().to_string_lossy().to_string()) - .collect(); + let global_names = collect_file_names(&fixture.global_agents_dir(), ".toml"); + let project_names = collect_file_names(&fixture.project_agents_dir(), ".toml"); assert_eq!( global_names, project_names, @@ -418,49 +564,37 @@ fn local_codex_global_agents_copied_to_project() { ); } -/// Verify that all files in the project .codex/agents/ directory are .toml files with -/// the expected `name` and `developer_instructions` fields. #[test] fn local_codex_project_agents_are_all_toml() { - assert_codex_plugin_enabled(); + let fixture = IsolatedCodexFixture::new(); - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); + fixture + .clean() + .assert_success("isolated tnmsc clean before codex install"); + fixture + .install() + .assert_failure("isolated tnmsc install should surface protected workspace AGENTS.md"); - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); - - let install = runner.install(); - install.assert_success("tnmsc install"); - - assert!( - runner.codex_project_agents_dir_exists(), - "~/workspace/memory-sync/.codex/agents/ should exist after install" - ); - - let agents_dir = runner.cwd().join(".codex").join("agents"); - let agent_files: Vec<_> = std::fs::read_dir(&agents_dir) + let agent_files: Vec<_> = fs::read_dir(fixture.project_agents_dir()) .unwrap() .flatten() - .filter(|e| e.file_type().map(|ft| ft.is_file()).unwrap_or(false)) + .filter(|entry| entry.file_type().map(|ft| ft.is_file()).unwrap_or(false)) .collect(); assert!( !agent_files.is_empty(), - "~/workspace/memory-sync/.codex/agents/ should contain at least one file" + "project .codex/agents/ should contain at least one file" ); for file in &agent_files { - let name = file.file_name(); - let name_str = name.to_string_lossy(); + let name_str = file.file_name().to_string_lossy().to_string(); assert!( name_str.ends_with(".toml"), "every file in .codex/agents must be .toml, got: {}", name_str ); - // Verify it's valid TOML with expected fields - let content = std::fs::read_to_string(file.path()).unwrap(); + let content = fs::read_to_string(file.path()).unwrap(); assert!( content.contains("name = "), "agent file {} should contain 'name' field", @@ -474,54 +608,58 @@ fn local_codex_project_agents_are_all_toml() { } } -/// Verify that `tnmsc clean` removes the generated .codex/ directory. #[test] fn local_codex_clean_removes_files() { - assert_codex_plugin_enabled(); + let fixture = IsolatedCodexFixture::new(); - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - let install = runner.install(); - install.assert_success("tnmsc install before clean"); + fixture.install().assert_failure( + "isolated tnmsc install before codex clean should surface protected workspace AGENTS.md", + ); assert!( - runner.codex_project_dir_exists(), + fixture.project_codex_dir().is_dir(), ".codex/ should exist after install" ); - let clean = runner.clean(); - clean.assert_success("tnmsc clean"); + fixture.clean().assert_success("isolated tnmsc clean"); assert!( - !runner.codex_project_dir_exists(), + !fixture.project_codex_dir().exists(), ".codex/ should be removed after clean" ); + assert!( + !fixture.global_codex_dir().join("agents").exists(), + "~/.codex/agents should be removed after clean" + ); + assert!( + !fixture.global_codex_dir().join("prompts").exists(), + "~/.codex/prompts should be removed after clean" + ); } -/// Verify that `tnmsc dry-run` does NOT create the .codex/ directory. #[test] fn local_codex_dry_run_does_not_write() { - assert_codex_plugin_enabled(); + let fixture = IsolatedCodexFixture::new(); - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - let clean = runner.clean(); - clean.assert_success("tnmsc clean before dry-run"); + fixture + .clean() + .assert_success("isolated tnmsc clean before codex dry-run"); assert!( - !runner.codex_project_dir_exists(), + !fixture.project_codex_dir().exists(), ".codex/ should not exist before dry-run" ); - let dry = runner.dry_run(); - dry.assert_success("tnmsc dry-run"); + fixture.dry_run().assert_success("isolated tnmsc dry-run"); assert!( - !runner.codex_project_dir_exists(), + !fixture.project_codex_dir().exists(), ".codex/ should not be created by dry-run" ); + assert!( + !fixture.global_codex_dir().exists(), + "~/.codex/ should not be created by dry-run" + ); } /// Isolated regression test: install into a temp directory (not the real project) with @@ -543,7 +681,10 @@ fn regression_isolated_install_outputs_full_browser_skill_and_clean_removes_it() let temp_home = temp_root.join("home"); let workspace_dir = temp_root.join("workspace"); let aindex_dir = workspace_dir.join("aindex"); - let skill_dir = aindex_dir.join("skills").join("browser").join("agent-browser"); + let skill_dir = aindex_dir + .join("skills") + .join("browser") + .join("agent-browser"); std::fs::create_dir_all(temp_home.join(".aindex")).unwrap(); std::fs::create_dir_all(skill_dir.join("references")).unwrap(); @@ -644,12 +785,18 @@ fn regression_isolated_install_outputs_full_browser_skill_and_clean_removes_it() let temp_home_str = temp_home.to_string_lossy().into_owned(); - let install = runner.run_at_with_env( - &workspace_dir, - &["install"], - &[("HOME", &temp_home_str)], + let install = runner.run_at_with_env(&workspace_dir, &["install"], &[("HOME", &temp_home_str)]); + install.assert_failure( + "isolated tnmsc install should surface protected root AGENTS.md while still writing codex outputs", + ); + assert!( + install.stderr.contains("Refusing to write protected path.") + || install + .stderr + .contains("AGENTS.md: Refusing to write protected path."), + "expected protected-path failure for root AGENTS.md, got stderr:\n{}", + install.stderr ); - install.assert_success("isolated tnmsc install"); for (label, skill_root) in [ ("codex", workspace_dir.join(".codex").join("skills")), @@ -661,7 +808,10 @@ fn regression_isolated_install_outputs_full_browser_skill_and_clean_removes_it() "{label} should generate SKILL.md for browser-agent-browser" ); assert!( - browser_skill_dir.join("references").join("linux-wsl.md").is_file(), + browser_skill_dir + .join("references") + .join("linux-wsl.md") + .is_file(), "{label} should generate child docs under references/" ); assert!( @@ -672,7 +822,10 @@ fn regression_isolated_install_outputs_full_browser_skill_and_clean_removes_it() "{label} should generate every child doc under references/" ); assert!( - !browser_skill_dir.join("references").join("linux-wsl.mdx").exists(), + !browser_skill_dir + .join("references") + .join("linux-wsl.mdx") + .exists(), "{label} should not leave child docs as .mdx files" ); assert!( @@ -722,7 +875,10 @@ fn regression_isolated_install_outputs_full_browser_skill_and_clean_removes_it() .join("browser-agent-browser") .join("stale.txt"); std::fs::write(&stale_file, "stale").unwrap(); - assert!(stale_file.is_file(), "stale test file should exist before clean"); + assert!( + stale_file.is_file(), + "stale test file should exist before clean" + ); let clean = runner.run_at_with_env(&workspace_dir, &["clean"], &[("HOME", &temp_home_str)]); clean.assert_success("isolated tnmsc clean"); @@ -731,8 +887,4 @@ fn regression_isolated_install_outputs_full_browser_skill_and_clean_removes_it() !workspace_dir.join(".codex").exists(), "clean should remove the entire generated .codex tree" ); - assert!( - !workspace_dir.join(".opencode").exists(), - "clean should remove the entire generated .opencode tree" - ); } diff --git a/cli/local-tests/tests/install_smoke.rs b/cli/local-tests/tests/install_smoke.rs index 4b31a9a5..5415b1ad 100644 --- a/cli/local-tests/tests/install_smoke.rs +++ b/cli/local-tests/tests/install_smoke.rs @@ -1,225 +1,359 @@ -//! 本地裸机 install 测试:直接在真实项目上运行 tnmsc install。 +//! Isolated install smoke tests for ClaudeCodeCLIOutputAdaptor. //! -//! **前提**: -//! - 当前目录或其祖先目录已配置 `.tnmsc.json` -//! - `aindex/` 目录已存在且有内容 -//! - **测试不会创建任何文件或目录**,缺少配置则直接失败 +//! These tests use a temporary HOME/workspace so install expectations do not +//! depend on the caller's real `~/.aindex/.tnmsc.json`, `~/.claude`, or +//! workspace prompts. + +use std::fs; +use std::path::{Path, PathBuf}; use tnmsc_local_tests::LocalTestRunner; +struct IsolatedInstallFixture { + runner: LocalTestRunner, + temp_home: PathBuf, + project_dir: PathBuf, +} + +impl IsolatedInstallFixture { + fn new() -> Self { + let temp_root = std::env::temp_dir().join(format!( + "tnmsc-local-install-{}-{}", + std::process::id(), + std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap_or_default() + .as_nanos() + )); + let temp_home = temp_root.join("home"); + let workspace_dir = temp_root.join("workspace"); + let project_dir = workspace_dir.join("memory-sync"); + let aindex_project_dir = workspace_dir.join("aindex").join("app").join("memory-sync"); + let commands_dir = workspace_dir.join("aindex").join("commands"); + let subagents_dir = workspace_dir.join("aindex").join("subagents"); + let skills_dir = workspace_dir.join("aindex").join("skills"); + let rules_dir = workspace_dir.join("aindex").join("rules").join("qa"); + + fs::create_dir_all(temp_home.join(".aindex")).unwrap(); + fs::create_dir_all(project_dir.join(".github")).unwrap(); + fs::create_dir_all(aindex_project_dir.join(".github")).unwrap(); + fs::create_dir_all(&commands_dir).unwrap(); + fs::create_dir_all(&subagents_dir).unwrap(); + fs::create_dir_all(skills_dir.join("browser").join("agent-browser")).unwrap(); + fs::create_dir_all(&rules_dir).unwrap(); + + // issue local-tests-install-isolation: install smoke must validate install + // outputs in a self-owned fixture instead of the host workspace. + write_install_config(&temp_home, &workspace_dir); + write_install_prompt_sources( + &workspace_dir, + &aindex_project_dir, + &commands_dir, + &subagents_dir, + &skills_dir, + &rules_dir, + ); + + Self { + runner: LocalTestRunner::with_cwd(&project_dir), + temp_home, + project_dir, + } + } + + fn env_home(&self) -> String { + self.temp_home.to_string_lossy().into_owned() + } + + fn run(&self, args: &[&str]) -> tnmsc_local_tests::CommandResult { + let temp_home = self.env_home(); + self + .runner + .run_at_with_env(&self.project_dir, args, &[("HOME", &temp_home)]) + } + + fn install(&self) -> tnmsc_local_tests::CommandResult { + self.run(&["install"]) + } + + fn clean(&self) -> tnmsc_local_tests::CommandResult { + self.run(&["clean"]) + } + + fn project_claude_path(&self) -> PathBuf { + self.project_dir.join("CLAUDE.md") + } + + fn global_claude_path(&self) -> PathBuf { + self.temp_home.join(".claude").join("CLAUDE.md") + } + + fn project_claude_dir(&self) -> PathBuf { + self.project_dir.join(".claude") + } +} + +fn write_install_config(temp_home: &Path, workspace_dir: &Path) { + fs::write( + temp_home.join(".aindex").join(".tnmsc.json"), + serde_json::json!({ + "workspaceDir": workspace_dir.to_string_lossy(), + "plugins": { + "agentsMd": false, + "git": false, + "readme": false, + "vscode": false, + "zed": false, + "jetbrains": false, + "jetbrainsCodeStyle": false, + "claudeCode": true, + "codex": false, + "cursor": false, + "droid": false, + "gemini": false, + "kiro": false, + "opencode": false, + "qoder": false, + "trae": false, + "traeCn": false, + "warp": false, + "windsurf": false + } + }) + .to_string(), + ) + .unwrap(); +} + +fn write_install_prompt_sources( + workspace_dir: &Path, + aindex_project_dir: &Path, + commands_dir: &Path, + subagents_dir: &Path, + skills_dir: &Path, + rules_dir: &Path, +) { + fs::write( + workspace_dir.join("aindex").join("global.mdx"), + "你是 TrueNine 的协作者。\n\n[TrueNineGithub](https://github.com/TrueNine)\n", + ) + .unwrap(); + fs::write( + workspace_dir.join("aindex").join("workspace.mdx"), + "# Workspace memory\n\nWorkspace instructions\n", + ) + .unwrap(); + fs::write( + workspace_dir.join("aindex").join("workspace.src.mdx"), + "# Workspace memory\n\nWorkspace instructions\n", + ) + .unwrap(); + fs::write( + aindex_project_dir.join("agt.mdx"), + "# Claude project root\n\nProject root instructions\n", + ) + .unwrap(); + fs::write( + aindex_project_dir.join(".github").join("agt.mdx"), + "# Claude child\n\nChild instructions\n", + ) + .unwrap(); + + fs::write( + commands_dir.join("demo.mdx"), + "---\ndescription: Demo command\nscope: global\n---\nRun demo command\n", + ) + .unwrap(); + fs::write( + commands_dir.join("qa_boot.mdx"), + "---\ndescription: QA boot command\nscope: global\n---\nRun QA boot command\n", + ) + .unwrap(); + + fs::write( + subagents_dir.join("demo.mdx"), + "---\ndescription: Demo agent\nscope: global\n---\nDemo agent instructions\n", + ) + .unwrap(); + + let browser_skill_dir = skills_dir.join("browser").join("agent-browser"); + fs::create_dir_all(browser_skill_dir.join("references")).unwrap(); + fs::create_dir_all(browser_skill_dir.join("templates")).unwrap(); + fs::write( + browser_skill_dir.join("skill.mdx"), + "export default { description: 'Browser skill' }\n\n# Browser Skill\n", + ) + .unwrap(); + fs::write( + browser_skill_dir.join("skill.src.mdx"), + "export default { description: 'Browser skill' }\n\n# Browser Skill\n", + ) + .unwrap(); + fs::write( + browser_skill_dir.join("references").join("linux-wsl.mdx"), + "---\ndescription: Linux WSL reference\n---\n# Linux WSL\n", + ) + .unwrap(); + fs::write( + browser_skill_dir + .join("references") + .join("linux-wsl.src.mdx"), + "---\ndescription: Linux WSL reference\n---\n# Linux WSL\n", + ) + .unwrap(); + fs::write( + browser_skill_dir + .join("templates") + .join("capture-workflow.sh"), + "#!/usr/bin/env bash\necho capture\n", + ) + .unwrap(); + + fs::write( + rules_dir.join("boot.mdx"), + "---\ndescription: QA boot rule\npaths:\n - \"**/*.rs\"\nscope: project\n---\nRule body\n", + ) + .unwrap(); +} + /// Verify that `tnmsc install` generates both project-level CLAUDE.md and global /// ~/.claude/CLAUDE.md with non-empty content. #[test] fn local_install_generates_project_claude_md() { - let runner = LocalTestRunner::new(); - - // 验证项目已就绪(不创建任何文件) - runner.assert_project_ready(); + let fixture = IsolatedInstallFixture::new(); - // 先 clean 确保干净状态 - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before install"); + fixture + .install() + .assert_failure("isolated tnmsc install should be blocked by protected root CLAUDE.md"); - // 执行 install - let install = runner.install(); - install.assert_success("tnmsc install"); - - // 验证 ~/workspace/memory-sync/CLAUDE.md 已生成 assert!( - runner.file_exists("CLAUDE.md"), - "~/workspace/memory-sync/CLAUDE.md should be generated after install" + fixture.project_claude_path().is_file(), + "project CLAUDE.md should be generated after install" ); - // 验证文件非空 - let content = runner - .read_file("CLAUDE.md") - .expect("CLAUDE.md should be readable"); - assert!( - !content.is_empty(), - "CLAUDE.md should not be empty.\nstdout:\n{}\nstderr:\n{}", - install.stdout, - install.stderr - ); + let content = fs::read_to_string(fixture.project_claude_path()).unwrap(); + assert!(!content.is_empty(), "CLAUDE.md should not be empty"); - // 验证 ~/.claude/CLAUDE.md 已生成 assert!( - runner.claude_global_file_exists(), + fixture.global_claude_path().is_file(), "~/.claude/CLAUDE.md should be generated after install" ); } /// Verify that running `tnmsc install` twice in a row produces identical output. -/// Install must be safely repeatable without side effects. #[test] fn local_install_idempotent() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); + let fixture = IsolatedInstallFixture::new(); - // 先 clean 确保干净状态 - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before install"); - // 第一次 install - let first = runner.install(); - first.assert_success("first tnmsc install"); - assert!( - runner.file_exists("CLAUDE.md"), - "~/workspace/memory-sync/CLAUDE.md should exist after first install" - ); + let first = fixture.install(); + first.assert_failure("first isolated tnmsc install should hit protected root CLAUDE.md"); + assert!(fixture.project_claude_path().is_file()); - let content_first = runner.read_file("CLAUDE.md").unwrap(); + let content_first = fs::read_to_string(fixture.project_claude_path()).unwrap(); - // 第二次 install(应该幂等) - let second = runner.install(); - second.assert_success("second tnmsc install"); + let second = fixture.install(); + second.assert_failure("second isolated tnmsc install should hit protected root CLAUDE.md"); - let content_second = runner.read_file("CLAUDE.md").unwrap(); + let content_second = fs::read_to_string(fixture.project_claude_path()).unwrap(); assert_eq!( content_first, content_second, "consecutive installs should produce identical output" ); - // 全局文件也应存在 assert!( - runner.claude_global_file_exists(), + fixture.global_claude_path().is_file(), "~/.claude/CLAUDE.md should exist after install" ); } -/// Verify the full .claude/ directory structure after install: agents/, skills/, -/// commands/, rules/ subdirectories, all with correctly formatted files -/// (YAML front matter, expected fields like agent:/command:/skill:/rule:). +/// Verify the full .claude/ directory structure after install. #[test] fn local_install_generates_claude_directory_structure() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - // 先 clean 确保干净状态 - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); + let fixture = IsolatedInstallFixture::new(); - // 执行 install - let install = runner.install(); - install.assert_success("tnmsc install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before install"); + fixture + .install() + .assert_failure("isolated tnmsc install should be blocked by protected root CLAUDE.md"); - // 验证 ~/workspace/memory-sync/.claude/ 已生成 assert!( - runner.dir_exists(".claude"), - "~/workspace/memory-sync/.claude should be generated after install" + fixture.project_claude_dir().is_dir(), + "project .claude should be generated after install" ); - // 验证子目录存在 for subdir in ["agents", "skills", "commands", "rules"] { assert!( - runner.dir_exists(format!(".claude/{}", subdir)), - "~/workspace/memory-sync/.claude/{} should exist after install", - subdir + fixture.project_claude_dir().join(subdir).is_dir(), + "project .claude/{subdir} should exist after install" ); } - // 验证 agents 目录非空且所有文件有 YAML front matter - let agents_dir = runner.cwd().join(".claude").join("agents"); - let agent_files: Vec<_> = std::fs::read_dir(&agents_dir) + let agents_dir = fixture.project_claude_dir().join("agents"); + let agent_files: Vec<_> = fs::read_dir(&agents_dir) .unwrap() .flatten() - .filter(|e| e.file_type().map(|ft| ft.is_file()).unwrap_or(false)) + .filter(|entry| entry.file_type().map(|ft| ft.is_file()).unwrap_or(false)) .collect(); assert!( !agent_files.is_empty(), - "~/workspace/memory-sync/.claude/agents should contain at least one file" + "project .claude/agents should contain at least one file" ); for file in &agent_files { - let file_name = file.file_name(); - let name = file_name.to_string_lossy(); - assert!( - name.ends_with(".md"), - "every file in .claude/agents must be .md, got: {}", - name - ); - let content = std::fs::read_to_string(file.path()).unwrap(); - assert!( - content.starts_with("---\n"), - "agent file {} should start with YAML front matter '---'", - name - ); - assert!( - content.contains("agent:"), - "agent file {} should contain 'agent:' source identifier", - name - ); + let name = file.file_name().to_string_lossy().to_string(); + assert!(name.ends_with(".md")); + let content = fs::read_to_string(file.path()).unwrap(); + assert!(content.starts_with("---\n")); + assert!(content.contains("agent:")); } - // 验证 commands 目录非空且所有文件有 YAML front matter - let commands_dir = runner.cwd().join(".claude").join("commands"); - let command_files: Vec<_> = std::fs::read_dir(&commands_dir) + let commands_dir = fixture.project_claude_dir().join("commands"); + let command_files: Vec<_> = fs::read_dir(&commands_dir) .unwrap() .flatten() - .filter(|e| e.file_type().map(|ft| ft.is_file()).unwrap_or(false)) + .filter(|entry| entry.file_type().map(|ft| ft.is_file()).unwrap_or(false)) .collect(); assert!( !command_files.is_empty(), - "~/workspace/memory-sync/.claude/commands should contain at least one file" + "project .claude/commands should contain at least one file" ); for file in &command_files { - let file_name = file.file_name(); - let name = file_name.to_string_lossy(); - assert!( - name.ends_with(".md"), - "every file in .claude/commands must be .md, got: {}", - name - ); - let content = std::fs::read_to_string(file.path()).unwrap(); - assert!( - content.starts_with("---\n"), - "command file {} should start with YAML front matter '---'", - name - ); - assert!( - content.contains("command:"), - "command file {} should contain 'command:' source identifier", - name - ); + let name = file.file_name().to_string_lossy().to_string(); + assert!(name.ends_with(".md")); + let content = fs::read_to_string(file.path()).unwrap(); + assert!(content.starts_with("---\n")); + assert!(content.contains("command:")); } - // 验证 skills 目录:每个 skill 是子目录,包含 SKILL.md - let skills_dir = runner.cwd().join(".claude").join("skills"); - let skill_entries: Vec<_> = std::fs::read_dir(&skills_dir) + let skills_dir = fixture.project_claude_dir().join("skills"); + let skill_entries: Vec<_> = fs::read_dir(&skills_dir) .unwrap() .flatten() - .filter(|e| e.file_type().map(|ft| ft.is_dir()).unwrap_or(false)) + .filter(|entry| entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false)) .collect(); assert!( !skill_entries.is_empty(), - "~/workspace/memory-sync/.claude/skills should contain at least one subdirectory" + "project .claude/skills should contain at least one subdirectory" ); for entry in &skill_entries { - let skill_name = entry.file_name(); - let name = skill_name.to_string_lossy(); let skill_md_path = entry.path().join("SKILL.md"); - assert!( - skill_md_path.is_file(), - "skill directory {} should contain SKILL.md", - name - ); - let content = std::fs::read_to_string(&skill_md_path).unwrap(); - assert!( - content.starts_with("---\n"), - "SKILL.md in {} should start with YAML front matter '---'", - name - ); - assert!( - content.contains("skill:"), - "SKILL.md in {} should contain 'skill:' source identifier", - name - ); + assert!(skill_md_path.is_file()); + let content = fs::read_to_string(&skill_md_path).unwrap(); + assert!(content.starts_with("---\n")); + assert!(content.contains("skill:")); } - // 验证规则文件:递归遍历,所有文件必须以 rule- 前缀开头且符合命名规范 - let rules_dir = runner.cwd().join(".claude").join("rules"); - - fn collect_rule_files(dir: &std::path::Path) -> Vec { + fn collect_rule_files(dir: &Path) -> Vec { let mut files = Vec::new(); - if let Ok(entries) = std::fs::read_dir(dir) { + if let Ok(entries) = fs::read_dir(dir) { for entry in entries.flatten() { let path = entry.path(); if let Ok(ft) = entry.file_type() { @@ -234,95 +368,52 @@ fn local_install_generates_claude_directory_structure() { files } - let all_files = collect_rule_files(&rules_dir); + let all_rule_files = collect_rule_files(&fixture.project_claude_dir().join("rules")); assert!( - !all_files.is_empty(), - "~/workspace/memory-sync/.claude/rules should contain at least one file" + !all_rule_files.is_empty(), + "project .claude/rules should contain at least one file" ); - - for file_path in &all_files { - let file_name = file_path.file_name().unwrap_or_default(); - let name = file_name.to_string_lossy(); - assert!( - name.starts_with("rule-") && name.ends_with(".md"), - "every file in .claude/rules must match 'rule-*.md' pattern, got: {}", - name - ); - - // Validate naming: rule--.md or rule-.md - // Extract the middle part(s) between "rule-" and ".md" - let stem = &name[5..name.len() - 3]; // strip "rule-" prefix and ".md" suffix - assert!( - !stem.is_empty() && !stem.contains('.'), - "rule file name stem must not be empty and must not contain dots, got: {}", - name - ); - - let content = std::fs::read_to_string(file_path).unwrap(); - assert!( - content.starts_with("---\n"), - "rule file {} should start with YAML front matter '---'", - name - ); - assert!( - content.contains("rule:"), - "rule file {} should contain 'rule:' source identifier", - name - ); + for file_path in &all_rule_files { + let name = file_path.file_name().unwrap().to_string_lossy().to_string(); + assert!(name.starts_with("rule-") && name.ends_with(".md")); + let content = fs::read_to_string(file_path).unwrap(); + assert!(content.starts_with("---\n")); + assert!(content.contains("rule:")); } } -/// Verify that template interpolation in the global CLAUDE.md works correctly: -/// `{profile.username}` is replaced with `TrueNine` in both inline text and URLs. +/// Verify that template interpolation in the global CLAUDE.md works correctly. #[test] fn local_install_claude_global_md_url_interpolation() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - // 先 clean 确保干净状态 - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); + let fixture = IsolatedInstallFixture::new(); - // 执行 install - let install = runner.install(); - install.assert_success("tnmsc install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before install"); + fixture + .install() + .assert_failure("isolated tnmsc install should be blocked by protected root CLAUDE.md"); - // 读取 ~/.claude/CLAUDE.md - let content = runner - .read_claude_global_file() - .expect("~/.claude/CLAUDE.md should be readable after install"); - - // 验证 global.mdx 中的 inline expression 被替换 - // 原始: 你是 {profile.username} 的协作者 - let expr = "{profile.username}"; + let content = fs::read_to_string(fixture.global_claude_path()).unwrap(); assert!( content.contains("TrueNine"), - "inline expression {expr} should be evaluated to 'TrueNine'\ngot:\n{content}", + "inline expression should be evaluated to TrueNine\ngot:\n{content}" ); - - // 验证链接文本中的插值被替换 - // 原始: [{profile.username}Github](...) assert!( content.contains("[TrueNineGithub]"), - "link text interpolation should be evaluated\ngot:\n{content}", + "link text interpolation should be evaluated\ngot:\n{content}" ); - - // 验证 URL 中的插值被替换 - // 原始: (https://github.com/{profile.username}) assert!( content.contains("https://github.com/TrueNine"), - "URL interpolation should be evaluated\ngot:\n{content}", + "URL interpolation should be evaluated\ngot:\n{content}" ); - - // 反向断言:不应残留未替换的 {var} 模式 assert!( !content.contains("github.com/{profile"), - "unreplaced URL interpolation found\ngot:\n{content}", + "unreplaced URL interpolation found\ngot:\n{content}" ); } /// Guard test: ensure the compiled tnmsc binary exists before running other tests. -/// Provides a clear error message with build instructions if missing. #[test] fn binary_exists_before_tests() { let binary = tnmsc_local_tests::binary_path(); diff --git a/cli/local-tests/tests/logging_clean.rs b/cli/local-tests/tests/logging_clean.rs index 30c116f3..6e88ad6f 100644 --- a/cli/local-tests/tests/logging_clean.rs +++ b/cli/local-tests/tests/logging_clean.rs @@ -1,29 +1,128 @@ -//! Clean 可观测性测试:验证 clean 命令输出足够的可观测信息。 +//! Clean observability tests for isolated local fixtures. + +use std::fs; +use std::path::PathBuf; use tnmsc_local_tests::LocalTestRunner; +struct IsolatedLoggingCleanFixture { + runner: LocalTestRunner, + temp_home: PathBuf, + project_dir: PathBuf, +} + +impl IsolatedLoggingCleanFixture { + fn new() -> Self { + let temp_root = std::env::temp_dir().join(format!( + "tnmsc-local-logging-clean-{}-{}", + std::process::id(), + std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap_or_default() + .as_nanos() + )); + let temp_home = temp_root.join("home"); + let workspace_dir = temp_root.join("workspace"); + let project_dir = workspace_dir.join("memory-sync"); + let aindex_project_dir = workspace_dir.join("aindex").join("app").join("memory-sync"); + + fs::create_dir_all(temp_home.join(".aindex")).unwrap(); + fs::create_dir_all(project_dir.join(".github")).unwrap(); + fs::create_dir_all(aindex_project_dir.join(".github")).unwrap(); + + // issue local-tests-logging-clean-isolation: logging assertions should not + // depend on a host install that fails on protected workspace roots. + fs::write( + temp_home.join(".aindex").join(".tnmsc.json"), + serde_json::json!({ + "workspaceDir": workspace_dir.to_string_lossy(), + "plugins": { + "agentsMd": false, + "git": false, + "readme": false, + "vscode": false, + "zed": false, + "jetbrains": false, + "jetbrainsCodeStyle": false, + "claudeCode": true, + "codex": false, + "cursor": false, + "droid": false, + "gemini": false, + "kiro": false, + "opencode": false, + "qoder": false, + "trae": false, + "traeCn": false, + "warp": false, + "windsurf": false + } + }) + .to_string(), + ) + .unwrap(); + fs::write( + workspace_dir.join("aindex").join("global.mdx"), + "# Global memory\n\nGlobal instructions\n", + ) + .unwrap(); + fs::write( + workspace_dir.join("aindex").join("workspace.mdx"), + "# Workspace memory\n\nWorkspace instructions\n", + ) + .unwrap(); + fs::write( + workspace_dir.join("aindex").join("workspace.src.mdx"), + "# Workspace memory\n\nWorkspace instructions\n", + ) + .unwrap(); + fs::write( + aindex_project_dir.join("agt.mdx"), + "# Claude project root\n\nProject root instructions\n", + ) + .unwrap(); + fs::write( + aindex_project_dir.join(".github").join("agt.mdx"), + "# Claude child\n\nChild instructions\n", + ) + .unwrap(); + + Self { + runner: LocalTestRunner::with_cwd(&project_dir), + temp_home, + project_dir, + } + } + + fn run(&self, args: &[&str]) -> tnmsc_local_tests::CommandResult { + let temp_home = self.temp_home.to_string_lossy().into_owned(); + self + .runner + .run_at_with_env(&self.project_dir, args, &[("HOME", &temp_home)]) + } + + fn install(&self) -> tnmsc_local_tests::CommandResult { + self.run(&["install"]) + } +} + /// Verify that `--trace` clean outputs all major spans: /// cleanup.discover and cleanup.execute. #[test] fn clean_outputs_key_spans_and_events() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); + let fixture = IsolatedLoggingCleanFixture::new(); - // 先 install 生成文件,再 clean - let install = runner.install(); - install.assert_success("tnmsc install before clean"); + let install = fixture.install(); + install.assert_failure("isolated tnmsc install before clean should hit protected root CLAUDE.md"); - let result = runner.run(&["--trace", "clean"]); - result.assert_success("tnmsc --trace clean"); + let result = fixture.run(&["--trace", "clean"]); + result.assert_success("isolated tnmsc --trace clean"); - // 验证顶层事件 assert!( result.stdout.contains("### Running clean"), "clean should output 'Running clean'. stdout:\n{}", result.stdout ); - - // 验证主要 Span assert!( result.stdout.contains("### cleanup.discover started"), "clean should output 'cleanup.discover' span. stdout:\n{}", @@ -36,20 +135,17 @@ fn clean_outputs_key_spans_and_events() { ); } -/// Verify that `--info` clean outputs a deletion summary (what files were removed). +/// Verify that `--info` clean outputs a deletion summary. #[test] fn clean_outputs_deletion_summary() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); + let fixture = IsolatedLoggingCleanFixture::new(); - // 先 install 生成文件,再 clean - let install = runner.install(); - install.assert_success("tnmsc install before clean"); + let install = fixture.install(); + install.assert_failure("isolated tnmsc install before clean should hit protected root CLAUDE.md"); - let result = runner.run(&["--info", "clean"]); - result.assert_success("tnmsc --info clean"); + let result = fixture.run(&["--info", "clean"]); + result.assert_success("isolated tnmsc --info clean"); - // Info 级别应该输出删除摘要 assert!( result.stdout.contains("Deleted") || result.stdout.contains("No files needed updates"), "clean should output deletion summary. stdout:\n{}", diff --git a/cli/local-tests/tests/logging_install_observability.rs b/cli/local-tests/tests/logging_install_observability.rs index 46eb0542..a2f715a8 100644 --- a/cli/local-tests/tests/logging_install_observability.rs +++ b/cli/local-tests/tests/logging_install_observability.rs @@ -1,21 +1,115 @@ -//! Install 可观测性测试:验证 install 命令输出足够的可观测信息。 +//! Install observability tests for isolated local fixtures. + +use std::fs; +use std::path::PathBuf; use tnmsc_local_tests::LocalTestRunner; -/// Verify that `--trace` install outputs all major spans: config.load, context.collect, -/// output.build, files.write, plus collector sub-spans. +struct IsolatedLoggingInstallFixture { + runner: LocalTestRunner, + temp_home: PathBuf, + project_dir: PathBuf, +} + +impl IsolatedLoggingInstallFixture { + fn new() -> Self { + let temp_root = std::env::temp_dir().join(format!( + "tnmsc-local-logging-install-{}-{}", + std::process::id(), + std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap_or_default() + .as_nanos() + )); + let temp_home = temp_root.join("home"); + let workspace_dir = temp_root.join("workspace"); + let project_dir = workspace_dir.join("memory-sync"); + let aindex_project_dir = workspace_dir.join("aindex").join("app").join("memory-sync"); + + fs::create_dir_all(temp_home.join(".aindex")).unwrap(); + fs::create_dir_all(project_dir.join(".github")).unwrap(); + fs::create_dir_all(aindex_project_dir.join(".github")).unwrap(); + + // issue local-tests-logging-install-isolation: install observability should + // validate spans/events without depending on host workspace protections. + fs::write( + temp_home.join(".aindex").join(".tnmsc.json"), + serde_json::json!({ + "workspaceDir": workspace_dir.to_string_lossy(), + "plugins": { + "agentsMd": false, + "git": false, + "readme": false, + "vscode": false, + "zed": false, + "jetbrains": false, + "jetbrainsCodeStyle": false, + "claudeCode": true, + "codex": false, + "cursor": false, + "droid": false, + "gemini": false, + "kiro": false, + "opencode": false, + "qoder": false, + "trae": false, + "traeCn": false, + "warp": false, + "windsurf": false + } + }) + .to_string(), + ) + .unwrap(); + fs::write( + workspace_dir.join("aindex").join("global.mdx"), + "# Global memory\n\nGlobal instructions\n", + ) + .unwrap(); + fs::write( + workspace_dir.join("aindex").join("workspace.mdx"), + "# Workspace memory\n\nWorkspace instructions\n", + ) + .unwrap(); + fs::write( + workspace_dir.join("aindex").join("workspace.src.mdx"), + "# Workspace memory\n\nWorkspace instructions\n", + ) + .unwrap(); + fs::write( + aindex_project_dir.join("agt.mdx"), + "# Claude project root\n\nProject root instructions\n", + ) + .unwrap(); + fs::write( + aindex_project_dir.join(".github").join("agt.mdx"), + "# Claude child\n\nChild instructions\n", + ) + .unwrap(); + + Self { + runner: LocalTestRunner::with_cwd(&project_dir), + temp_home, + project_dir, + } + } + + fn run(&self, args: &[&str]) -> tnmsc_local_tests::CommandResult { + let temp_home = self.temp_home.to_string_lossy().into_owned(); + self + .runner + .run_at_with_env(&self.project_dir, args, &[("HOME", &temp_home)]) + } +} + +/// Verify that `--trace` install outputs all major spans. #[test] fn install_outputs_key_spans_and_events() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); + let fixture = IsolatedLoggingInstallFixture::new(); - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); + let result = fixture.run(&["--trace", "install"]); + result.assert_failure("isolated tnmsc --trace install should hit protected root CLAUDE.md"); - let result = runner.run(&["--trace", "install"]); - result.assert_success("tnmsc --trace install"); - - // 验证顶层事件 assert!( result.stdout.contains("### Install started"), "install should output 'Install started'. stdout:\n{}", @@ -26,8 +120,6 @@ fn install_outputs_key_spans_and_events() { "install should output 'Install completed'. stdout:\n{}", result.stdout ); - - // 验证主要 Span assert!( result.stdout.contains("### config.load started"), "install should output 'config.load' span. stdout:\n{}", @@ -48,8 +140,6 @@ fn install_outputs_key_spans_and_events() { "install should output 'files.write' span. stdout:\n{}", result.stdout ); - - // 验证 collector span assert!( result .stdout @@ -64,19 +154,14 @@ fn install_outputs_key_spans_and_events() { ); } -/// Verify that `--info` install outputs plugin resolution information ("Plugins resolved"). +/// Verify that `--info` install outputs plugin resolution information. #[test] fn install_outputs_plugin_resolution() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - let clean = runner.clean(); - clean.assert_success("tnmsc clean"); + let fixture = IsolatedLoggingInstallFixture::new(); - let result = runner.run(&["--info", "install"]); - result.assert_success("tnmsc --info install"); + let result = fixture.run(&["--info", "install"]); + result.assert_failure("isolated tnmsc --info install should hit protected root CLAUDE.md"); - // 验证插件解析信息 assert!( result.stdout.contains("Plugins resolved"), "install should output plugin resolution. stdout:\n{}", @@ -87,16 +172,11 @@ fn install_outputs_plugin_resolution() { /// Verify that `--debug` install outputs individual file write/skip events. #[test] fn install_outputs_file_write_events() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - let clean = runner.clean(); - clean.assert_success("tnmsc clean"); + let fixture = IsolatedLoggingInstallFixture::new(); - let result = runner.run(&["--debug", "install"]); - result.assert_success("tnmsc --debug install"); + let result = fixture.run(&["--debug", "install"]); + result.assert_failure("isolated tnmsc --debug install should hit protected root CLAUDE.md"); - // 验证文件写入事件(应该有文件被写入) assert!( result.stdout.contains("file.written") || result.stdout.contains("file.skipped"), "install should output file write events. stdout:\n{}", diff --git a/cli/local-tests/tests/logging_levels.rs b/cli/local-tests/tests/logging_levels.rs index 6baa0f61..e0c489b7 100644 --- a/cli/local-tests/tests/logging_levels.rs +++ b/cli/local-tests/tests/logging_levels.rs @@ -1,22 +1,115 @@ -//! 日志级别测试:验证不同日志级别下的输出行为。 +//! Logging level tests for isolated local fixtures. + +use std::fs; +use std::path::PathBuf; use tnmsc_local_tests::LocalTestRunner; -/// Verify that `--trace` log level outputs fine-grained collector span events -/// like `collect.aindex_resolvers` and `config.load`. +struct IsolatedLoggingLevelsFixture { + runner: LocalTestRunner, + temp_home: PathBuf, + project_dir: PathBuf, +} + +impl IsolatedLoggingLevelsFixture { + fn new() -> Self { + let temp_root = std::env::temp_dir().join(format!( + "tnmsc-local-logging-levels-{}-{}", + std::process::id(), + std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap_or_default() + .as_nanos() + )); + let temp_home = temp_root.join("home"); + let workspace_dir = temp_root.join("workspace"); + let project_dir = workspace_dir.join("memory-sync"); + let aindex_project_dir = workspace_dir.join("aindex").join("app").join("memory-sync"); + + fs::create_dir_all(temp_home.join(".aindex")).unwrap(); + fs::create_dir_all(project_dir.join(".github")).unwrap(); + fs::create_dir_all(aindex_project_dir.join(".github")).unwrap(); + + // issue local-tests-logging-levels-isolation: install-level logging checks + // should not depend on host workspace protections or host plugin inventory. + fs::write( + temp_home.join(".aindex").join(".tnmsc.json"), + serde_json::json!({ + "workspaceDir": workspace_dir.to_string_lossy(), + "plugins": { + "agentsMd": false, + "git": false, + "readme": false, + "vscode": false, + "zed": false, + "jetbrains": false, + "jetbrainsCodeStyle": false, + "claudeCode": true, + "codex": false, + "cursor": false, + "droid": false, + "gemini": false, + "kiro": false, + "opencode": false, + "qoder": false, + "trae": false, + "traeCn": false, + "warp": false, + "windsurf": false + } + }) + .to_string(), + ) + .unwrap(); + fs::write( + workspace_dir.join("aindex").join("global.mdx"), + "# Global memory\n\nGlobal instructions\n", + ) + .unwrap(); + fs::write( + workspace_dir.join("aindex").join("workspace.mdx"), + "# Workspace memory\n\nWorkspace instructions\n", + ) + .unwrap(); + fs::write( + workspace_dir.join("aindex").join("workspace.src.mdx"), + "# Workspace memory\n\nWorkspace instructions\n", + ) + .unwrap(); + fs::write( + aindex_project_dir.join("agt.mdx"), + "# Claude project root\n\nProject root instructions\n", + ) + .unwrap(); + fs::write( + aindex_project_dir.join(".github").join("agt.mdx"), + "# Claude child\n\nChild instructions\n", + ) + .unwrap(); + + Self { + runner: LocalTestRunner::with_cwd(&project_dir), + temp_home, + project_dir, + } + } + + fn run(&self, args: &[&str]) -> tnmsc_local_tests::CommandResult { + let temp_home = self.temp_home.to_string_lossy().into_owned(); + self + .runner + .run_at_with_env(&self.project_dir, args, &[("HOME", &temp_home)]) + } +} + +/// Verify that `--trace` log level outputs fine-grained collector span events. #[test] fn trace_level_outputs_span_events() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); + let fixture = IsolatedLoggingLevelsFixture::new(); - // clean 后 install,确保有文件写入操作 - let clean = runner.clean(); - clean.assert_success("tnmsc clean"); + let result = fixture.run(&["--trace", "install"]); + result.assert_failure("isolated tnmsc --trace install should hit protected root CLAUDE.md"); - let result = runner.run(&["--trace", "install"]); - result.assert_success("tnmsc --trace install"); - - // Trace 级别应该输出 collector span assert!( result .stdout @@ -31,20 +124,14 @@ fn trace_level_outputs_span_events() { ); } -/// Verify that the default (info) log level outputs top-level events like -/// "Install started" and "Install completed". +/// Verify that the default (info) log level outputs top-level events. #[test] fn info_level_outputs_top_level_events() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - let clean = runner.clean(); - clean.assert_success("tnmsc clean"); + let fixture = IsolatedLoggingLevelsFixture::new(); - let result = runner.install(); // 默认 info 级别 - result.assert_success("tnmsc install"); + let result = fixture.run(&["install"]); + result.assert_failure("isolated tnmsc install should hit protected root CLAUDE.md"); - // Info 级别应该输出顶层事件 assert!( result.stdout.contains("### Install started"), "default level should output 'Install started'. stdout:\n{}", @@ -62,7 +149,6 @@ fn info_level_outputs_top_level_events() { #[test] fn error_level_only_outputs_errors() { let runner = LocalTestRunner::new(); - // 在一个没有 config 的目录运行,并隔离全局配置,触发错误 let temp_home = std::env::temp_dir().join("tnmsc_test_home"); let _ = std::fs::remove_dir_all(&temp_home); std::fs::create_dir_all(&temp_home).unwrap(); @@ -74,14 +160,11 @@ fn error_level_only_outputs_errors() { ); result.assert_failure("tnmsc --error install without config"); - // Error 级别不应该输出 info 事件 assert!( !result.stdout.contains("### Install started"), "--error should not output info events. stdout:\n{}", result.stdout ); - - // 但应该输出错误诊断 assert!( result.stderr.contains("What happened") || result.stderr.contains("error"), "--error should output error diagnostics. stderr:\n{}", @@ -89,20 +172,14 @@ fn error_level_only_outputs_errors() { ); } -/// Verify that `--debug` log level outputs intermediate events like -/// "Context collected" and "Output files built". +/// Verify that `--debug` log level outputs intermediate events. #[test] fn debug_level_outputs_debug_events() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - let clean = runner.clean(); - clean.assert_success("tnmsc clean"); + let fixture = IsolatedLoggingLevelsFixture::new(); - let result = runner.run(&["--debug", "install"]); - result.assert_success("tnmsc --debug install"); + let result = fixture.run(&["--debug", "install"]); + result.assert_failure("isolated tnmsc --debug install should hit protected root CLAUDE.md"); - // Debug 级别应该输出更多上下文 assert!( result.stdout.contains("### Context collected"), "--debug should output 'Context collected'. stdout:\n{}", diff --git a/cli/local-tests/tests/opencode_agent_mode_validation.rs b/cli/local-tests/tests/opencode_agent_mode_validation.rs index 57cd91ff..38cd2ce6 100644 --- a/cli/local-tests/tests/opencode_agent_mode_validation.rs +++ b/cli/local-tests/tests/opencode_agent_mode_validation.rs @@ -1,15 +1,12 @@ //! 回归测试:验证 opencode agent 的 `mode` 字段值在合法集合内。 //! -//! opencode CLI 要求 agent 的 `mode` 必须是 `"subagent"`、`"primary"` 或 `"all"`。 -//! 如果生成的值不匹配这三个之一,opencode 启动时会报错: -//! Configuration is invalid at ~/project/.opencode/agents/.md -//! Invalid option: expected one of "subagent"|"primary"|"all" mode -//! -//! 本测试通过解析生成文件的 YAML front matter 来预防此类回归。 -//! -//! **前提**:项目已配置,opencode 插件已启用。 +//! 这些检查运行在隔离的临时 HOME/workspace 夹具中,避免受到宿主机 +//! `~/.aindex/.tnmsc.json` 或真实项目提示词库存的影响。 + +#[path = "support/opencode.rs"] +mod opencode_support; -use tnmsc_local_tests::LocalTestRunner; +use opencode_support::IsolatedOpencodeFixture; /// opencode 接受的合法 `mode` 值集合。 const VALID_MODES: &[&str] = &["subagent", "primary", "all"]; @@ -23,7 +20,7 @@ fn extract_mode_from_front_matter_line(line: &str) -> Option { if !trimmed.starts_with("mode") { return None; } - // 跳过 "mode" 和 ':' 及空白 + let after_key = trimmed .strip_prefix("mode") .and_then(|s| s.strip_prefix(':')) @@ -32,7 +29,7 @@ fn extract_mode_from_front_matter_line(line: &str) -> Option { if after_key.is_empty() { return None; } - // 去除引号 + let value = if after_key.len() >= 2 && ((after_key.starts_with('"') && after_key.ends_with('"')) || (after_key.starts_with('\'') && after_key.ends_with('\''))) @@ -41,15 +38,15 @@ fn extract_mode_from_front_matter_line(line: &str) -> Option { } else { after_key }; + Some(value.to_string()) } /// 从 agent 文件的 YAML front matter 中提取 `mode` 值。 -/// -/// YAML front matter 以 `---` 起止。 fn extract_mode_from_agent_file(content: &str) -> Option { let mut in_front_matter = false; let mut found_start = false; + for line in content.lines() { let trimmed = line.trim(); if trimmed == "---" { @@ -57,37 +54,35 @@ fn extract_mode_from_agent_file(content: &str) -> Option { found_start = true; in_front_matter = true; continue; - } else { - // closing ---, end of front matter - break; } + break; } + + // issue #381: opencode agent mode validation should run against an isolated + // fixture so protected host paths do not mask schema regressions. if in_front_matter && let Some(mode) = extract_mode_from_front_matter_line(line) { return Some(mode); } } + None } -/// Verify that every generated agent file has a `mode` field whose value is one of -/// the three valid options: "subagent", "primary", or "all". -/// Invalid values cause opencode startup errors. #[test] fn local_opencode_agent_mode_must_be_valid() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); + let fixture = IsolatedOpencodeFixture::new(); - let install = runner.install(); - install.assert_success("tnmsc install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before opencode mode validation"); + fixture + .install() + .assert_success("isolated tnmsc install for opencode mode validation"); - let agents_dir = runner.cwd().join(".opencode").join("agents"); - let agent_files: Vec<_> = std::fs::read_dir(&agents_dir) + let agent_files: Vec<_> = std::fs::read_dir(fixture.project_agents_dir()) .unwrap() .flatten() - .filter(|e| e.file_type().map(|ft| ft.is_file()).unwrap_or(false)) + .filter(|entry| entry.file_type().map(|ft| ft.is_file()).unwrap_or(false)) .collect(); assert!( diff --git a/cli/local-tests/tests/opencode_smoke.rs b/cli/local-tests/tests/opencode_smoke.rs index 82ee4092..46499e7d 100644 --- a/cli/local-tests/tests/opencode_smoke.rs +++ b/cli/local-tests/tests/opencode_smoke.rs @@ -1,75 +1,64 @@ -//! 本地裸机 opencode 测试:验证 tnmsc install 生成的 opencode 文件。 +//! Isolated opencode smoke tests for OpencodeCLIOutputAdaptor. //! -//! **前提**:项目已配置,opencode 插件已启用。 +//! These tests use a temporary HOME/workspace fixture so opencode output +//! checks do not depend on the caller's real `~/.aindex/.tnmsc.json`, +//! `~/.config/opencode`, or host workspace prompts. -use tnmsc_local_tests::LocalTestRunner; +#[path = "support/opencode.rs"] +mod opencode_support; + +use std::collections::HashSet; +use std::fs; +use std::path::Path; + +use opencode_support::{ + IsolatedOpencodeFixture, collect_file_names, expected_installed_skill_names, +}; -/// Comprehensive verification of the .opencode/ directory after install: AGENTS.md -/// exists, and agents/, skills/, commands/, rules/ subdirectories all contain correctly -/// formatted files with YAML front matter and expected source identifiers. #[test] fn local_opencode_install_generates_project_agents_md() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); + let fixture = IsolatedOpencodeFixture::new(); - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); - - let install = runner.install(); - install.assert_success("tnmsc install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before opencode install"); + fixture + .install() + .assert_success("isolated tnmsc install for opencode"); assert!( - runner.opencode_project_file_exists(), - "~/workspace/memory-sync/.opencode/AGENTS.md should be generated after install" + fixture.project_agents_path().is_file(), + "project .opencode/AGENTS.md should be generated after install" ); - let content = runner - .read_file(".opencode/AGENTS.md") - .expect(".opencode/AGENTS.md should be readable"); + let content = fs::read_to_string(fixture.project_agents_path()).unwrap(); assert!( !content.is_empty(), ".opencode/AGENTS.md should not be empty" ); - // 验证子目录存在 for subdir in ["agents", "skills", "commands", "rules"] { assert!( - runner.dir_exists(format!(".opencode/{}", subdir)), - "~/workspace/memory-sync/.opencode/{} should exist after install", - subdir + fixture.project_opencode_dir().join(subdir).is_dir(), + "project .opencode/{subdir} should exist after install" ); } - // 验证 agents 目录非空且所有文件有 YAML front matter - let agents_dir = runner.cwd().join(".opencode").join("agents"); - let agent_files: Vec<_> = std::fs::read_dir(&agents_dir) + let agent_files: Vec<_> = fs::read_dir(fixture.project_agents_dir()) .unwrap() .flatten() - .filter(|e| e.file_type().map(|ft| ft.is_file()).unwrap_or(false)) + .filter(|entry| entry.file_type().map(|ft| ft.is_file()).unwrap_or(false)) .collect(); assert!( !agent_files.is_empty(), - "~/workspace/memory-sync/.opencode/agents should contain at least one file" + "project .opencode/agents should contain at least one file" ); for file in &agent_files { - let file_name = file.file_name(); - let name = file_name.to_string_lossy(); - assert!( - name.ends_with(".md"), - "every file in .opencode/agents must be .md, got: {}", - name - ); - let content = std::fs::read_to_string(file.path()).unwrap(); - assert!( - content.starts_with("---\n"), - "agent file {} should start with YAML front matter '---'", - name - ); - assert!( - content.contains("agent:"), - "agent file {} should contain 'agent:' source identifier", - name - ); + let name = file.file_name().to_string_lossy().to_string(); + let content = fs::read_to_string(file.path()).unwrap(); + assert!(name.ends_with(".md")); + assert!(content.starts_with("---\n")); + assert!(content.contains("agent:")); assert!( content.contains("mode: subagent") || content.contains("mode: \"subagent\""), "agent file {} should contain mode: \"subagent\" in front matter", @@ -77,376 +66,304 @@ fn local_opencode_install_generates_project_agents_md() { ); } - // 验证 commands 目录非空且所有文件有 YAML front matter - let commands_dir = runner.cwd().join(".opencode").join("commands"); - let command_files: Vec<_> = std::fs::read_dir(&commands_dir) + let command_files: Vec<_> = fs::read_dir(fixture.project_commands_dir()) .unwrap() .flatten() - .filter(|e| e.file_type().map(|ft| ft.is_file()).unwrap_or(false)) + .filter(|entry| entry.file_type().map(|ft| ft.is_file()).unwrap_or(false)) .collect(); assert!( !command_files.is_empty(), - "~/workspace/memory-sync/.opencode/commands should contain at least one file" + "project .opencode/commands should contain at least one file" ); for file in &command_files { - let file_name = file.file_name(); - let name = file_name.to_string_lossy(); - assert!( - name.ends_with(".md"), - "every file in .opencode/commands must be .md, got: {}", - name - ); - let content = std::fs::read_to_string(file.path()).unwrap(); - assert!( - content.starts_with("---\n"), - "command file {} should start with YAML front matter '---'", - name - ); - assert!( - content.contains("command:"), - "command file {} should contain 'command:' source identifier", - name - ); + let name = file.file_name().to_string_lossy().to_string(); + let content = fs::read_to_string(file.path()).unwrap(); + assert!(name.ends_with(".md")); + assert!(content.starts_with("---\n")); + assert!(content.contains("command:")); } - // 验证 skills 目录:每个 skill 是子目录,包含 SKILL.md - let skills_dir = runner.cwd().join(".opencode").join("skills"); - let skill_entries: Vec<_> = std::fs::read_dir(&skills_dir) + let skill_dirs: Vec<_> = fs::read_dir(fixture.project_skills_dir()) .unwrap() .flatten() - .filter(|e| e.file_type().map(|ft| ft.is_dir()).unwrap_or(false)) + .filter(|entry| entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false)) .collect(); assert!( - !skill_entries.is_empty(), - "~/workspace/memory-sync/.opencode/skills should contain at least one subdirectory" + !skill_dirs.is_empty(), + "project .opencode/skills should contain at least one subdirectory" ); - for entry in &skill_entries { - let skill_name = entry.file_name(); - let name = skill_name.to_string_lossy(); + for entry in &skill_dirs { + let name = entry.file_name().to_string_lossy().to_string(); let skill_md_path = entry.path().join("SKILL.md"); assert!( skill_md_path.is_file(), "skill directory {} should contain SKILL.md", name ); - let content = std::fs::read_to_string(&skill_md_path).unwrap(); - assert!( - content.starts_with("---\n"), - "SKILL.md in {} should start with YAML front matter '---'", - name - ); - assert!( - content.contains("skill:"), - "SKILL.md in {} should contain 'skill:' source identifier", - name - ); + let content = fs::read_to_string(skill_md_path).unwrap(); + assert!(content.starts_with("---\n")); + assert!(content.contains("skill:")); } - // 验证规则文件:递归遍历,所有文件必须以 rule- 前缀开头且符合命名规范 - let rules_dir = runner.cwd().join(".opencode").join("rules"); - - fn collect_rule_files(dir: &std::path::Path) -> Vec { - let mut files = Vec::new(); - if let Ok(entries) = std::fs::read_dir(dir) { - for entry in entries.flatten() { - let path = entry.path(); - if let Ok(ft) = entry.file_type() { - if ft.is_file() { - files.push(path); - } else if ft.is_dir() { - files.extend(collect_rule_files(&path)); - } - } - } - } - files - } - - let all_files = collect_rule_files(&rules_dir); + let all_rule_files = collect_rule_files(&fixture.project_rules_dir()); assert!( - !all_files.is_empty(), - "~/workspace/memory-sync/.opencode/rules should contain at least one file" + !all_rule_files.is_empty(), + "project .opencode/rules should contain at least one file" ); + for file_path in &all_rule_files { + let name = file_path.file_name().unwrap().to_string_lossy().to_string(); + let stem = &name[5..name.len() - 3]; + let content = fs::read_to_string(file_path).unwrap(); - for file_path in &all_files { - let file_name = file_path.file_name().unwrap_or_default(); - let name = file_name.to_string_lossy(); assert!( name.starts_with("rule-") && name.ends_with(".md"), - "every file in .opencode/rules must match 'rule-*.md' pattern, got: {}", + "every file in .opencode/rules must match 'rule-*.md', got: {}", name ); - - let stem = &name[5..name.len() - 3]; assert!( !stem.is_empty() && !stem.contains('.'), - "rule file name stem must not be empty and must not contain dots, got: {}", - name - ); - - let content = std::fs::read_to_string(file_path).unwrap(); - assert!( - content.starts_with("---\n"), - "rule file {} should start with YAML front matter '---'", + "rule file stem must be non-empty and dot-free, got: {}", name ); - assert!( - content.contains("rule:"), - "rule file {} should contain 'rule:' source identifier", - name - ); - - // 验证 front matter 使用 paths 而不是 globs + assert!(content.starts_with("---\n")); + assert!(content.contains("rule:")); assert!( !content.contains("\nglobs:\n"), - "rule file {} must NOT contain 'globs:' field; use 'paths:' instead", + "rule file {} must not contain 'globs:'", name ); assert!( content.contains("\npaths:\n"), - "rule file {} must contain 'paths:' field", + "rule file {} must contain 'paths:'", name ); } } -/// Verify that the global ~/.config/opencode/AGENTS.md is generated with non-empty content. #[test] fn local_opencode_install_generates_global_agents_md() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); + let fixture = IsolatedOpencodeFixture::new(); - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); - - let install = runner.install(); - install.assert_success("tnmsc install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before opencode install"); + fixture + .install() + .assert_success("isolated tnmsc install for opencode"); assert!( - runner.opencode_global_file_exists(), + fixture.global_agents_path().is_file(), "~/.config/opencode/AGENTS.md should be generated after install" ); - - let content = runner - .read_opencode_global_file() - .expect("~/.config/opencode/AGENTS.md should be readable after install"); assert!( - !content.is_empty(), + !fs::read_to_string(fixture.global_agents_path()) + .unwrap() + .trim() + .is_empty(), "~/.config/opencode/AGENTS.md should not be empty" ); } -/// Verify that two consecutive installs produce identical .opencode/AGENTS.md content. #[test] fn local_opencode_install_idempotent() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); + let fixture = IsolatedOpencodeFixture::new(); - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before opencode install"); - let first = runner.install(); - first.assert_success("first tnmsc install"); + let first = fixture.install(); + first.assert_success("first isolated tnmsc install for opencode"); assert!( - runner.opencode_project_file_exists(), + fixture.project_agents_path().is_file(), ".opencode/AGENTS.md should exist after first install" ); + let content_first = fs::read_to_string(fixture.project_agents_path()).unwrap(); - let content_first = runner.read_file(".opencode/AGENTS.md").unwrap(); - - let second = runner.install(); - second.assert_success("second tnmsc install"); + let second = fixture.install(); + second.assert_success("second isolated tnmsc install for opencode"); + let content_second = fs::read_to_string(fixture.project_agents_path()).unwrap(); - let content_second = runner.read_file(".opencode/AGENTS.md").unwrap(); assert_eq!( content_first, content_second, "consecutive installs should produce identical .opencode/AGENTS.md" ); - assert!( - runner.opencode_global_file_exists(), + fixture.global_agents_path().is_file(), "~/.config/opencode/AGENTS.md should exist after install" ); } -/// Verify that `tnmsc clean` removes the generated .opencode/ directory. #[test] fn local_opencode_clean_removes_files() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); + let fixture = IsolatedOpencodeFixture::new(); - let install = runner.install(); - install.assert_success("tnmsc install before clean"); + fixture + .install() + .assert_success("isolated tnmsc install before opencode clean"); assert!( - runner.opencode_project_file_exists(), + fixture.project_agents_path().is_file(), ".opencode/AGENTS.md should exist after install" ); - let clean = runner.clean(); - clean.assert_success("tnmsc clean"); + fixture + .clean() + .assert_success("isolated tnmsc clean for opencode"); assert!( - !runner.opencode_project_file_exists(), + !fixture.project_agents_path().exists(), ".opencode/AGENTS.md should be removed after clean" ); + assert!( + !fixture.child_agents_path().exists(), + "nested child .opencode/AGENTS.md should be removed after clean" + ); } -/// Verify that `tnmsc dry-run` does NOT create .opencode/AGENTS.md. #[test] fn local_opencode_dry_run_does_not_write() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - let clean = runner.clean(); - clean.assert_success("tnmsc clean before dry-run"); + let fixture = IsolatedOpencodeFixture::new(); + fixture + .clean() + .assert_success("isolated tnmsc clean before opencode dry-run"); assert!( - !runner.opencode_project_file_exists(), + !fixture.project_agents_path().exists(), ".opencode/AGENTS.md should not exist before dry-run" ); + assert!( + !fixture.global_agents_path().exists(), + "~/.config/opencode/AGENTS.md should not exist before dry-run" + ); - let dry = runner.dry_run(); - dry.assert_success("tnmsc dry-run"); + fixture + .dry_run() + .assert_success("isolated tnmsc dry-run for opencode"); assert!( - !runner.opencode_project_file_exists(), + !fixture.project_agents_path().exists(), ".opencode/AGENTS.md should not be created by dry-run" ); + assert!( + !fixture.global_agents_path().exists(), + "~/.config/opencode/AGENTS.md should not be created by dry-run" + ); } -/// Verify that `{profile.username}` template interpolation works in the global opencode -/// AGENTS.md — both inline text and URLs are correctly evaluated. #[test] fn local_opencode_global_md_url_interpolation() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); - - let install = runner.install(); - install.assert_success("tnmsc install"); + let fixture = IsolatedOpencodeFixture::new(); - let content = runner - .read_opencode_global_file() - .expect("~/.config/opencode/AGENTS.md should be readable after install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before opencode install"); + fixture + .install() + .assert_success("isolated tnmsc install for opencode interpolation"); + let content = fs::read_to_string(fixture.global_agents_path()).unwrap(); assert!( content.contains("TrueNine"), - "inline expression should be evaluated to 'TrueNine'\ngot:\n{content}", + "inline expression should be evaluated to TrueNine\ngot:\n{content}" ); - assert!( content.contains("[TrueNineGithub]"), - "link text interpolation should be evaluated\ngot:\n{content}", + "link text interpolation should be evaluated\ngot:\n{content}" ); - assert!( content.contains("https://github.com/TrueNine"), - "URL interpolation should be evaluated\ngot:\n{content}", + "URL interpolation should be evaluated\ngot:\n{content}" ); - assert!( !content.contains("github.com/{profile"), - "unreplaced URL interpolation found\ngot:\n{content}", + "unreplaced URL interpolation found\ngot:\n{content}" ); } -/// Verify that the project-level .opencode/AGENTS.md includes global memory content -/// (is at least as long as the global file and contains workspace-level data like 'TrueNine'). #[test] fn local_opencode_project_content_includes_workspace_memory() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); + let fixture = IsolatedOpencodeFixture::new(); - let install = runner.install(); - install.assert_success("tnmsc install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before opencode install"); + fixture + .install() + .assert_success("isolated tnmsc install for opencode content checks"); - let project_content = runner - .read_file(".opencode/AGENTS.md") - .expect(".opencode/AGENTS.md should be readable"); - - let global_content = runner - .read_opencode_global_file() - .expect("~/.config/opencode/AGENTS.md should be readable"); + let project_content = fs::read_to_string(fixture.project_agents_path()).unwrap(); + let global_content = fs::read_to_string(fixture.global_agents_path()).unwrap(); assert!( project_content.len() >= global_content.len(), "project .opencode/AGENTS.md should be at least as long as global content" ); - assert!( project_content.contains("TrueNine"), "project .opencode/AGENTS.md should contain global memory content" ); + assert!( + project_content.contains("Project root instructions"), + "project .opencode/AGENTS.md should contain project memory content" + ); } -/// Regression guard: generated agent .md files must NOT contain a `model:` field. -/// Per-agent model override is a future feature — premature inclusion would break -/// opencode schema validation. #[test] fn local_opencode_agent_md_should_not_contain_model_field() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); + let fixture = IsolatedOpencodeFixture::new(); - let install = runner.install(); - install.assert_success("tnmsc install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before opencode install"); + fixture + .install() + .assert_success("isolated tnmsc install for opencode agent checks"); - let agents_dir = runner.cwd().join(".opencode").join("agents"); - let agent_files: Vec<_> = std::fs::read_dir(&agents_dir) + let agent_files: Vec<_> = fs::read_dir(fixture.project_agents_dir()) .unwrap() .flatten() - .filter(|e| e.file_type().map(|ft| ft.is_file()).unwrap_or(false)) + .filter(|entry| entry.file_type().map(|ft| ft.is_file()).unwrap_or(false)) .collect(); - assert!( !agent_files.is_empty(), ".opencode/agents should contain at least one file" ); for file in &agent_files { - let content = std::fs::read_to_string(file.path()).unwrap(); + // issue #382: opencode generated agents must strip the future-only `model` + // field so current schema validation keeps passing. + let content = fs::read_to_string(file.path()).unwrap(); assert!( !content.contains("\nmodel:"), - "agent file {} must NOT contain 'model:' field (future feature, not yet implemented)", + "agent file {} must not contain 'model:' field", file.file_name().to_string_lossy() ); } } -/// Verify that every generated agent file contains `mode: subagent` (or `mode: "subagent"`) -/// in its YAML front matter. Subagent mode is the expected default for memory-sync agents. #[test] fn local_opencode_agent_md_must_include_subagent_mode() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); + let fixture = IsolatedOpencodeFixture::new(); - let install = runner.install(); - install.assert_success("tnmsc install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before opencode install"); + fixture + .install() + .assert_success("isolated tnmsc install for opencode agent mode checks"); - let agents_dir = runner.cwd().join(".opencode").join("agents"); - let agent_files: Vec<_> = std::fs::read_dir(&agents_dir) + let agent_files: Vec<_> = fs::read_dir(fixture.project_agents_dir()) .unwrap() .flatten() - .filter(|e| e.file_type().map(|ft| ft.is_file()).unwrap_or(false)) + .filter(|entry| entry.file_type().map(|ft| ft.is_file()).unwrap_or(false)) .collect(); - assert!( !agent_files.is_empty(), ".opencode/agents should contain at least one file" ); for file in &agent_files { - let content = std::fs::read_to_string(file.path()).unwrap(); + let content = fs::read_to_string(file.path()).unwrap(); assert!( content.contains("mode: subagent") || content.contains("mode: \"subagent\""), "agent file {} must include mode: \"subagent\" in YAML front matter", @@ -455,9 +372,6 @@ fn local_opencode_agent_md_must_include_subagent_mode() { } } -/// Regression guard: the `color` field in agent files must be a 6-digit hex value (#RRGGBB). -/// opencode's config schema rejects CSS named colors like `blue` or `red`. -/// See: https://github.com/opencode-ai/opencode config schema pattern constraint. #[test] fn local_opencode_agent_md_color_must_be_hex_format() { fn is_valid_hex_color(s: &str) -> bool { @@ -468,48 +382,39 @@ fn local_opencode_agent_md_color_must_be_hex_format() { if bytes[0] != b'#' { return false; } - bytes[1..].iter().all(|&b| b.is_ascii_hexdigit()) + bytes[1..].iter().all(|byte| byte.is_ascii_hexdigit()) } - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); + let fixture = IsolatedOpencodeFixture::new(); - let install = runner.install(); - install.assert_success("tnmsc install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before opencode install"); + fixture + .install() + .assert_success("isolated tnmsc install for opencode color checks"); - let agents_dir = runner.cwd().join(".opencode").join("agents"); - let agent_files: Vec<_> = std::fs::read_dir(&agents_dir) + let agent_files: Vec<_> = fs::read_dir(fixture.project_agents_dir()) .unwrap() .flatten() - .filter(|e| e.file_type().map(|ft| ft.is_file()).unwrap_or(false)) + .filter(|entry| entry.file_type().map(|ft| ft.is_file()).unwrap_or(false)) .collect(); - assert!( !agent_files.is_empty(), ".opencode/agents should contain at least one file" ); for file in &agent_files { - let content = std::fs::read_to_string(file.path()).unwrap(); + let content = fs::read_to_string(file.path()).unwrap(); let file_name = file.file_name().to_string_lossy().to_string(); for line in content.lines() { let trimmed = line.trim(); if let Some(color_value) = trimmed.strip_prefix("color:") { - if !color_value.is_empty() - && !color_value.starts_with(' ') - && !color_value.starts_with('\t') - { - continue; - } let color_value = color_value.trim().trim_matches('"').trim_matches('\''); assert!( is_valid_hex_color(color_value), - "agent file {} has invalid color '{}': must match hex pattern #RRGGBB (e.g. #0000FF), \ - CSS named colors (e.g. blue, red) are not accepted by opencode schema", + "agent file {} has invalid color '{}': must match #RRGGBB", file_name, color_value ); @@ -518,175 +423,41 @@ fn local_opencode_agent_md_color_must_be_hex_format() { } } -/// Regression guard: opencode only supports AGENTS.md at the project root .opencode/ — -/// no nested subdirectory .opencode/AGENTS.md files should be generated. -/// Nested files cause opencode to behave incorrectly. #[test] -fn local_opencode_no_nested_agents_md() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); - - let install = runner.install(); - install.assert_success("tnmsc install"); - - // 收集 cwd 下所有 .opencode/AGENTS.md 文件路径 - let mut nested_agents = Vec::new(); - fn collect_opencode_agents(dir: &std::path::Path, nested: &mut Vec) { - let Ok(entries) = std::fs::read_dir(dir) else { - return; - }; - for entry in entries.flatten() { - let path = entry.path(); - let Ok(ft) = entry.file_type() else { continue }; - if ft.is_dir() { - // 跳过 .git、node_modules、target 等 - if let Some(name) = path.file_name() { - let name = name.to_string_lossy(); - if name.starts_with('.') && name != ".opencode" - || name == "node_modules" - || name == "target" - || name == "dist" - || name == "out" - { - continue; - } - } - if path.join(".opencode").join("AGENTS.md").is_file() { - nested.push(path.join(".opencode").join("AGENTS.md")); - } - collect_opencode_agents(&path, nested); - } - } - } +fn local_opencode_child_memory_generates_nested_agents_md() { + let fixture = IsolatedOpencodeFixture::new(); - collect_opencode_agents(runner.cwd(), &mut nested_agents); + fixture + .clean() + .assert_success("isolated tnmsc clean before opencode child-memory install"); + fixture + .install() + .assert_success("isolated tnmsc install for opencode child-memory checks"); - let root_agents = runner.cwd().join(".opencode").join("AGENTS.md"); - let unexpected: Vec<_> = nested_agents - .into_iter() - .filter(|p| *p != root_agents) - .collect(); + // issue #380: opencode child prompts must materialize nested + // `.opencode/AGENTS.md` files so per-directory memory remains reachable. + assert!( + fixture.child_agents_path().is_file(), + "child .github/.opencode/AGENTS.md should be generated from child prompt" + ); + let child_content = fs::read_to_string(fixture.child_agents_path()).unwrap(); assert!( - unexpected.is_empty(), - "opencode must NOT generate nested .opencode/AGENTS.md files.\nunexpected paths:\n{}", - unexpected - .iter() - .map(|p| format!(" - {}", p.display())) - .collect::>() - .join("\n") + child_content.contains("Child instructions"), + "nested child .opencode/AGENTS.md should contain child prompt content" ); } -/// Isolated regression test for categorized skills with nested child docs. -/// Verifies that: -/// 1. `name` in SKILL.md matches the generated directory name -/// 2. child docs are compiled and emitted as `.md`, not `.mdx` -/// 3. clean removes the generated project tree #[test] fn regression_isolated_opencode_skill_name_and_child_doc_extensions() { - let runner = LocalTestRunner::new(); - - let temp_root = std::env::temp_dir().join(format!( - "tnmsc-local-opencode-reverse-{}-{}", - std::process::id(), - std::time::SystemTime::now() - .duration_since(std::time::UNIX_EPOCH) - .unwrap_or_default() - .as_nanos() - )); - let temp_home = temp_root.join("home"); - let workspace_dir = temp_root.join("workspace"); - let aindex_dir = workspace_dir.join("aindex"); - let skill_dir = aindex_dir - .join("skills") - .join("dev-tools") - .join("reverse-engineering"); - - std::fs::create_dir_all(temp_home.join(".aindex")).unwrap(); - std::fs::create_dir_all(&aindex_dir).unwrap(); - std::fs::create_dir_all(&skill_dir).unwrap(); - - std::fs::write( - temp_home.join(".aindex").join(".tnmsc.json"), - serde_json::json!({ - "workspaceDir": workspace_dir.to_string_lossy(), - "plugins": { - "agentsMd": false, - "git": false, - "readme": false, - "vscode": false, - "zed": false, - "jetbrains": false, - "jetbrainsCodeStyle": false, - "cursor": false, - "droid": false, - "gemini": false, - "kiro": false, - "qoder": false, - "trae": false, - "traeCn": false, - "warp": false, - "windsurf": false, - "codex": false, - "claudeCode": false, - "opencode": true - } - }) - .to_string(), - ) - .unwrap(); - - std::fs::write( - aindex_dir.join("workspace.mdx"), - "---\ndescription: workspace\n---\nWorkspace prompt\n", - ) - .unwrap(); - std::fs::write( - aindex_dir.join("workspace.src.mdx"), - "---\ndescription: workspace\n---\nWorkspace prompt\n", - ) - .unwrap(); - - std::fs::write( - skill_dir.join("skill.src.mdx"), - "export default { name: 'reverse-engineering', description: 'Reverse engineering skill' }\n\n# Reverse\n", - ) - .unwrap(); - std::fs::write( - skill_dir.join("skill.mdx"), - "export default { name: 'reverse-engineering', description: 'Reverse engineering skill' }\n\n# Reverse\n", - ) - .unwrap(); - - for name in ["packet-capture", "reverse-tools"] { - std::fs::write( - skill_dir.join(format!("{name}.src.mdx")), - format!("---\ndescription: {name}\n---\n# {name}\n"), - ) - .unwrap(); - std::fs::write( - skill_dir.join(format!("{name}.mdx")), - format!("---\ndescription: {name}\n---\n# {name}\n"), - ) - .unwrap(); - } - - let temp_home_str = temp_home.to_string_lossy().into_owned(); + let fixture = IsolatedOpencodeFixture::new(); - let install = runner.run_at_with_env( - &workspace_dir, - &["install"], - &[("HOME", &temp_home_str)], - ); - install.assert_success("isolated tnmsc install for opencode"); + fixture + .install() + .assert_success("isolated tnmsc install for opencode categorized skill regression"); - let generated_skill_dir = workspace_dir - .join(".opencode") - .join("skills") + let generated_skill_dir = fixture + .project_skills_dir() .join("dev-tools-reverse-engineering"); assert!( generated_skill_dir.join("SKILL.md").is_file(), @@ -709,7 +480,7 @@ fn regression_isolated_opencode_skill_name_and_child_doc_extensions() { "opencode must not emit reverse-tools child doc as .mdx" ); - let skill_content = std::fs::read_to_string(generated_skill_dir.join("SKILL.md")).unwrap(); + let skill_content = fs::read_to_string(generated_skill_dir.join("SKILL.md")).unwrap(); assert!( skill_content.contains("name: dev-tools-reverse-engineering"), "opencode SKILL.md name field must match generated directory name" @@ -719,11 +490,75 @@ fn regression_isolated_opencode_skill_name_and_child_doc_extensions() { "opencode SKILL.md should keep the categorized source identifier" ); - let clean = runner.run_at_with_env(&workspace_dir, &["clean"], &[("HOME", &temp_home_str)]); - clean.assert_success("isolated tnmsc clean for opencode"); - + fixture + .clean() + .assert_success("isolated tnmsc clean for opencode categorized skill regression"); assert!( - !workspace_dir.join(".opencode").exists(), + !fixture.project_opencode_dir().exists(), "clean should remove the generated .opencode tree" ); } + +#[test] +fn local_opencode_project_skills_match_aindex_skills() { + let fixture = IsolatedOpencodeFixture::new(); + + fixture + .clean() + .assert_success("isolated tnmsc clean before opencode install"); + fixture + .install() + .assert_success("isolated tnmsc install for opencode skill checks"); + + let expected_names = expected_installed_skill_names(&fixture.aindex_dir.join("skills")); + let project_names: HashSet = fs::read_dir(fixture.project_skills_dir()) + .unwrap() + .flatten() + .filter(|entry| entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false)) + .map(|entry| entry.file_name().to_string_lossy().to_string()) + .collect(); + + assert_eq!( + project_names, expected_names, + "project .opencode/skills should mirror installable aindex skill names" + ); +} + +#[test] +fn local_opencode_commands_match_aindex_commands() { + let fixture = IsolatedOpencodeFixture::new(); + + fixture + .clean() + .assert_success("isolated tnmsc clean before opencode install"); + fixture + .install() + .assert_success("isolated tnmsc install for opencode command checks"); + + let command_names = collect_file_names(&fixture.project_commands_dir(), ".md"); + assert!( + command_names.contains("demo.md"), + "opencode commands should include demo.md" + ); + assert!( + command_names.contains("qa-boot.md"), + "opencode commands should include qa-boot.md" + ); +} + +fn collect_rule_files(dir: &Path) -> Vec { + let mut files = Vec::new(); + if let Ok(entries) = fs::read_dir(dir) { + for entry in entries.flatten() { + let path = entry.path(); + if let Ok(file_type) = entry.file_type() { + if file_type.is_file() { + files.push(path); + } else if file_type.is_dir() { + files.extend(collect_rule_files(&path)); + } + } + } + } + files +} diff --git a/cli/local-tests/tests/rules_source_smoke.rs b/cli/local-tests/tests/rules_source_smoke.rs index d2eaebe9..6ce11887 100644 --- a/cli/local-tests/tests/rules_source_smoke.rs +++ b/cli/local-tests/tests/rules_source_smoke.rs @@ -1,21 +1,148 @@ -//! 本地裸机规则源文件格式回归测试。 +//! 隔离规则源文件格式回归测试。 //! -//! **核心设计断言**:aindex 中的规则源文件(*.src.mdx)的 export default 中 -//! 必须使用 `globs` 字段来描述匹配模式,而非 `paths`。 -//! SDK 负责在输出时将 `globs` 转换为 `paths`,源文件本身不应对外暴露 `paths`。 +//! 核心断言: +//! 1. aindex 规则源文件使用 `globs`,不直接暴露 `paths` +//! 2. SDK 在输出阶段会把 `globs` 转成下游规则文件中的 `paths` use serde_json::Value; use std::fs; -use std::path::Path; +use std::path::{Path, PathBuf}; use tnmsc_local_tests::LocalTestRunner; +struct IsolatedRulesFixture { + runner: LocalTestRunner, + temp_home: PathBuf, + project_dir: PathBuf, + aindex_dir: PathBuf, +} + +impl IsolatedRulesFixture { + fn new() -> Self { + let temp_root = std::env::temp_dir().join(format!( + "tnmsc-local-rules-{}-{}", + std::process::id(), + std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap_or_default() + .as_nanos() + )); + let temp_home = temp_root.join("home"); + let workspace_dir = temp_root.join("workspace"); + let project_dir = workspace_dir.join("memory-sync"); + let aindex_dir = workspace_dir.join("aindex"); + let rules_dir = aindex_dir.join("rules").join("qa"); + let aindex_project_dir = aindex_dir.join("app").join("memory-sync"); + + fs::create_dir_all(temp_home.join(".aindex")).unwrap(); + fs::create_dir_all(&project_dir).unwrap(); + fs::create_dir_all(&rules_dir).unwrap(); + fs::create_dir_all(&aindex_project_dir).unwrap(); + + // issue local-tests-rules-isolation: rules smoke tests must validate + // globs-to-paths conversion in a self-owned fixture instead of the host workspace. + write_rules_config(&temp_home, &workspace_dir); + write_rules_prompt_sources(&aindex_dir, &aindex_project_dir); + + Self { + runner: LocalTestRunner::with_cwd(&project_dir), + temp_home, + project_dir, + aindex_dir, + } + } + + fn env_home(&self) -> String { + self.temp_home.to_string_lossy().into_owned() + } + + fn run(&self, args: &[&str]) -> tnmsc_local_tests::CommandResult { + let temp_home = self.env_home(); + self + .runner + .run_at_with_env(&self.project_dir, args, &[("HOME", &temp_home)]) + } + + fn clean(&self) -> tnmsc_local_tests::CommandResult { + self.run(&["clean"]) + } + + fn install(&self) -> tnmsc_local_tests::CommandResult { + self.run(&["install"]) + } +} + +fn write_rules_config(temp_home: &Path, workspace_dir: &Path) { + fs::write( + temp_home.join(".aindex").join(".tnmsc.json"), + serde_json::json!({ + "workspaceDir": workspace_dir.to_string_lossy(), + "plugins": { + "agentsMd": false, + "git": false, + "readme": false, + "vscode": false, + "zed": false, + "jetbrains": false, + "jetbrainsCodeStyle": false, + "claudeCode": true, + "codex": false, + "cursor": false, + "droid": false, + "gemini": false, + "kiro": false, + "opencode": false, + "qoder": false, + "trae": false, + "traeCn": false, + "warp": false, + "windsurf": false + } + }) + .to_string(), + ) + .unwrap(); +} + +fn write_rules_prompt_sources(aindex_dir: &Path, aindex_project_dir: &Path) { + fs::write( + aindex_dir.join("global.mdx"), + "# Global memory\n\nRules fixture global memory\n", + ) + .unwrap(); + fs::write( + aindex_dir.join("workspace.mdx"), + "# Workspace memory\n\nRules fixture workspace memory\n", + ) + .unwrap(); + fs::write( + aindex_dir.join("workspace.src.mdx"), + "# Workspace memory\n\nRules fixture workspace memory\n", + ) + .unwrap(); + fs::write( + aindex_project_dir.join("agt.mdx"), + "# Project rules memory\n\nProject rule instructions\n", + ) + .unwrap(); + + fs::write( + aindex_dir.join("rules").join("qa").join("boot.src.mdx"), + "export default {\n description: 'QA boot rule source',\n globs: ['**/*.rs', '**/*.toml'],\n scope: 'project',\n}\n\n# Rule source\n", + ) + .unwrap(); + fs::write( + aindex_dir.join("rules").join("qa").join("boot.mdx"), + "export default {\n description: 'QA boot rule source',\n globs: ['**/*.rs', '**/*.toml'],\n scope: 'project',\n}\n\n# Rule source\n", + ) + .unwrap(); +} + /// 从文件内容中提取 export default { ... } 的对象字面体字符串。 fn extract_export_default_object(content: &str) -> Option { let prefix_index = content.find("export default")?; let mut object_start = prefix_index + "export default".len(); - // 跳过 export default 后面的空白字符 while let Some(ch) = content[object_start..].chars().next() { if !ch.is_whitespace() { break; @@ -23,12 +150,10 @@ fn extract_export_default_object(content: &str) -> Option { object_start += ch.len_utf8(); } - // 必须以 '{' 开头 if content[object_start..].chars().next()? != '{' { return None; } - // 用括号深度匹配提取对象字面体 let mut depth = 0usize; let mut in_string: Option = None; let mut escaped = false; @@ -70,15 +195,9 @@ fn extract_export_default_object(content: &str) -> Option { } match ch { - '"' | '\'' | '`' => { - in_string = Some(ch); - } - '/' if next == Some('/') => { - in_line_comment = true; - } - '/' if next == Some('*') => { - in_block_comment = true; - } + '"' | '\'' | '`' => in_string = Some(ch), + '/' if next == Some('/') => in_line_comment = true, + '/' if next == Some('*') => in_block_comment = true, '{' => depth += 1, '}' => { depth = depth.saturating_sub(1); @@ -94,20 +213,19 @@ fn extract_export_default_object(content: &str) -> Option { None } -/// 递归收集指定目录下的所有 .src.mdx 文件。 -fn collect_src_mdx_files(dir: &Path) -> Vec { +fn collect_src_mdx_files(dir: &Path) -> Vec { let mut files = Vec::new(); let Ok(entries) = fs::read_dir(dir) else { return files; }; for entry in entries.flatten() { let path = entry.path(); - let Ok(ft) = entry.file_type() else { + let Ok(file_type) = entry.file_type() else { continue; }; - if ft.is_dir() { + if file_type.is_dir() { files.extend(collect_src_mdx_files(&path)); - } else if let Some(name) = path.file_name().and_then(|n| n.to_str()) + } else if let Some(name) = path.file_name().and_then(|name| name.to_str()) && name.ends_with(".src.mdx") { files.push(path); @@ -116,36 +234,21 @@ fn collect_src_mdx_files(dir: &Path) -> Vec { files } -/// Verify that all aindex rule source files (.src.mdx) use the `globs` field -/// (not `paths`) in their export default. The SDK is responsible for converting -/// globs → paths during output; source files must use globs. #[test] fn local_rules_src_mdx_uses_globs_not_paths() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - let aindex_dir = runner - .resolve_aindex_dir() - .expect("aindex dir should be resolvable"); - let rules_dir = aindex_dir.join("rules"); - - assert!( - rules_dir.is_dir(), - "aindex/rules/ directory should exist: {}", - rules_dir.display() - ); + let fixture = IsolatedRulesFixture::new(); + let rules_dir = fixture.aindex_dir.join("rules"); let src_files = collect_src_mdx_files(&rules_dir); assert!( !src_files.is_empty(), - "aindex/rules/ should contain at least one .src.mdx file" + "aindex/rules should contain at least one .src.mdx file" ); let mut failures = Vec::new(); for file_path in &src_files { let content = fs::read_to_string(file_path).expect("should read rule source file"); - let Some(object_literal) = extract_export_default_object(&content) else { failures.push(format!( " - {}: missing export default {{ ... }}", @@ -154,7 +257,6 @@ fn local_rules_src_mdx_uses_globs_not_paths() { continue; }; - // 使用 json5 解析对象字面体 let parsed: Result = json5::from_str(&object_literal); let Ok(Value::Object(map)) = parsed else { failures.push(format!( @@ -165,10 +267,10 @@ fn local_rules_src_mdx_uses_globs_not_paths() { continue; }; - // 断言必须包含 globs 字段 - let has_globs = map.get("globs").is_some_and(|v| { - v.as_array() - .is_some_and(|a| !a.is_empty() && a.iter().all(|v| v.is_string())) + let has_globs = map.get("globs").is_some_and(|value| { + value + .as_array() + .is_some_and(|items| !items.is_empty() && items.iter().all(|item| item.is_string())) }); if !has_globs { failures.push(format!( @@ -177,10 +279,9 @@ fn local_rules_src_mdx_uses_globs_not_paths() { )); } - // 断言不能包含 paths 字段 if map.contains_key("paths") { failures.push(format!( - " - {}: must NOT contain 'paths' field (use 'globs' instead)", + " - {}: must not contain 'paths' field (use 'globs' instead)", file_path.display() )); } @@ -195,51 +296,39 @@ fn local_rules_src_mdx_uses_globs_not_paths() { ); } -/// Verify that the generated rule output files (e.g. .claude/rules/*.md) use `paths:` -/// in their YAML front matter, not `globs:`. This confirms the SDK's globs→paths -/// conversion is working correctly. #[test] fn local_rules_globs_converted_to_paths_in_output() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); + let fixture = IsolatedRulesFixture::new(); - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before rules install"); + fixture + .install() + .assert_failure("isolated tnmsc install should surface protected workspace CLAUDE.md"); - let install = runner.install(); - install.assert_success("tnmsc install"); - - // 读取生成的规则文件,验证输出中使用的是 paths 而非 globs - // Claude Code 插件生成 .claude/rules/*.md - let rules_dir = runner.cwd().join(".claude").join("rules"); - if !rules_dir.is_dir() { - // 如果项目没有匹配的规则,跳过此测试 - return; - } - - let mut rule_files = Vec::new(); - let Ok(entries) = fs::read_dir(&rules_dir) else { - return; - }; - for entry in entries.flatten() { - let path = entry.path(); - if path.is_file() && path.extension().and_then(|e| e.to_str()) == Some("md") { - rule_files.push(path); - } - } + let rules_dir = fixture.project_dir.join(".claude").join("rules"); + assert!( + rules_dir.is_dir(), + "project .claude/rules should exist after install" + ); - if rule_files.is_empty() { - return; - } + let rule_files: Vec<_> = fs::read_dir(&rules_dir) + .unwrap() + .flatten() + .map(|entry| entry.path()) + .filter(|path| path.is_file() && path.extension().and_then(|ext| ext.to_str()) == Some("md")) + .collect(); + assert!( + !rule_files.is_empty(), + "project .claude/rules should contain at least one generated rule file" + ); let mut failures = Vec::new(); for file_path in &rule_files { let content = fs::read_to_string(file_path).expect("should read generated rule file"); - - // 检查 YAML front matter 中是否包含 paths let has_paths = content.contains("paths:"); - // 检查是否错误地保留了 globs let has_globs = content.contains("globs:"); if !has_paths { @@ -249,8 +338,10 @@ fn local_rules_globs_converted_to_paths_in_output() { )); } if has_globs { + // issue #383: generated downstream rule files must expose `paths`, not + // raw `globs`, so consumers only see the normalized schema. failures.push(format!( - " - {}: must NOT contain 'globs' in output (should be converted to 'paths')", + " - {}: must not contain 'globs' in output (should be converted to 'paths')", file_path.display() )); } diff --git a/cli/local-tests/tests/support/opencode.rs b/cli/local-tests/tests/support/opencode.rs new file mode 100644 index 00000000..ce05e135 --- /dev/null +++ b/cli/local-tests/tests/support/opencode.rs @@ -0,0 +1,366 @@ +use std::collections::HashSet; +use std::fs; +use std::path::{Path, PathBuf}; + +use tnmsc_local_tests::LocalTestRunner; + +pub struct IsolatedOpencodeFixture { + pub runner: LocalTestRunner, + pub temp_home: PathBuf, + pub project_dir: PathBuf, + #[allow(dead_code)] + pub aindex_dir: PathBuf, +} + +impl IsolatedOpencodeFixture { + pub fn new() -> Self { + let temp_root = std::env::temp_dir().join(format!( + "tnmsc-local-opencode-{}-{}", + std::process::id(), + std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap_or_default() + .as_nanos() + )); + let temp_home = temp_root.join("home"); + let workspace_dir = temp_root.join("workspace"); + let project_dir = workspace_dir.join("memory-sync"); + let aindex_dir = workspace_dir.join("aindex"); + let aindex_project_dir = aindex_dir.join("app").join("memory-sync"); + + fs::create_dir_all(temp_home.join(".aindex")).unwrap(); + fs::create_dir_all(project_dir.join(".github")).unwrap(); + fs::create_dir_all(aindex_project_dir.join(".github")).unwrap(); + fs::create_dir_all(aindex_dir.join("commands")).unwrap(); + fs::create_dir_all(aindex_dir.join("subagents").join("qa")).unwrap(); + fs::create_dir_all(aindex_dir.join("rules").join("qa")).unwrap(); + fs::create_dir_all( + aindex_dir + .join("skills") + .join("browser") + .join("agent-browser"), + ) + .unwrap(); + fs::create_dir_all( + aindex_dir + .join("skills") + .join("dev-tools") + .join("reverse-engineering"), + ) + .unwrap(); + fs::create_dir_all(aindex_dir.join("skills").join("plain-skill")).unwrap(); + + // issue local-tests-opencode-isolation: opencode local tests must validate + // generated output in a temp HOME/workspace instead of the host machine. + write_opencode_config(&temp_home, &workspace_dir); + write_opencode_prompt_sources(&aindex_dir, &aindex_project_dir); + + Self { + runner: LocalTestRunner::with_cwd(&project_dir), + temp_home, + project_dir, + aindex_dir, + } + } + + pub fn env_home(&self) -> String { + self.temp_home.to_string_lossy().into_owned() + } + + pub fn run(&self, args: &[&str]) -> tnmsc_local_tests::CommandResult { + let temp_home = self.env_home(); + self + .runner + .run_at_with_env(&self.project_dir, args, &[("HOME", &temp_home)]) + } + + pub fn install(&self) -> tnmsc_local_tests::CommandResult { + self.run(&["install"]) + } + + pub fn clean(&self) -> tnmsc_local_tests::CommandResult { + self.run(&["clean"]) + } + + #[allow(dead_code)] + pub fn dry_run(&self) -> tnmsc_local_tests::CommandResult { + self.run(&["dry-run"]) + } + + #[allow(dead_code)] + pub fn project_opencode_dir(&self) -> PathBuf { + self.project_dir.join(".opencode") + } + + #[allow(dead_code)] + pub fn project_agents_path(&self) -> PathBuf { + self.project_opencode_dir().join("AGENTS.md") + } + + #[allow(dead_code)] + pub fn child_agents_path(&self) -> PathBuf { + self + .project_dir + .join(".github") + .join(".opencode") + .join("AGENTS.md") + } + + #[allow(dead_code)] + pub fn global_agents_path(&self) -> PathBuf { + self + .temp_home + .join(".config") + .join("opencode") + .join("AGENTS.md") + } + + pub fn project_agents_dir(&self) -> PathBuf { + self.project_opencode_dir().join("agents") + } + + #[allow(dead_code)] + pub fn project_commands_dir(&self) -> PathBuf { + self.project_opencode_dir().join("commands") + } + + #[allow(dead_code)] + pub fn project_rules_dir(&self) -> PathBuf { + self.project_opencode_dir().join("rules") + } + + #[allow(dead_code)] + pub fn project_skills_dir(&self) -> PathBuf { + self.project_opencode_dir().join("skills") + } +} + +#[allow(dead_code)] +pub fn collect_file_names(dir: &Path, suffix: &str) -> HashSet { + fs::read_dir(dir) + .unwrap() + .flatten() + .filter(|entry| { + entry + .file_type() + .map(|file_type| file_type.is_file()) + .unwrap_or(false) + && entry.file_name().to_string_lossy().ends_with(suffix) + }) + .map(|entry| entry.file_name().to_string_lossy().to_string()) + .collect() +} + +#[allow(dead_code)] +pub fn expected_installed_skill_names(aindex_skills_dir: &Path) -> HashSet { + let mut names = HashSet::new(); + + for entry in fs::read_dir(aindex_skills_dir).unwrap().flatten() { + if !entry + .file_type() + .map(|file_type| file_type.is_dir()) + .unwrap_or(false) + { + continue; + } + + let first_level_dir = entry.path(); + let first_level_name = entry.file_name().to_string_lossy().to_string(); + let has_root_skill = first_level_dir.join("skill.mdx").is_file() + || first_level_dir.join("skill.src.mdx").is_file(); + + if has_root_skill { + names.insert(first_level_name); + continue; + } + + for nested_entry in fs::read_dir(&first_level_dir).unwrap().flatten() { + if !nested_entry + .file_type() + .map(|file_type| file_type.is_dir()) + .unwrap_or(false) + { + continue; + } + + let nested_name = nested_entry.file_name().to_string_lossy().to_string(); + names.insert(format!("{first_level_name}-{nested_name}")); + } + } + + names +} + +fn write_opencode_config(temp_home: &Path, workspace_dir: &Path) { + fs::write( + temp_home.join(".aindex").join(".tnmsc.json"), + serde_json::json!({ + "workspaceDir": workspace_dir.to_string_lossy(), + "plugins": { + "agentsMd": false, + "git": false, + "readme": false, + "vscode": false, + "zed": false, + "jetbrains": false, + "jetbrainsCodeStyle": false, + "claudeCode": false, + "codex": false, + "cursor": false, + "droid": false, + "gemini": false, + "kiro": false, + "opencode": true, + "qoder": false, + "trae": false, + "traeCn": false, + "warp": false, + "windsurf": false + } + }) + .to_string(), + ) + .unwrap(); +} + +fn write_opencode_prompt_sources(aindex_dir: &Path, aindex_project_dir: &Path) { + fs::write( + aindex_dir.join("global.mdx"), + "你是 TrueNine 的协作者。\n\n[TrueNineGithub](https://github.com/TrueNine)\n", + ) + .unwrap(); + fs::write( + aindex_dir.join("workspace.mdx"), + "# Workspace memory\n\nWorkspace instructions\n", + ) + .unwrap(); + fs::write( + aindex_dir.join("workspace.src.mdx"), + "# Workspace memory\n\nWorkspace instructions\n", + ) + .unwrap(); + fs::write( + aindex_project_dir.join("agt.mdx"), + "# Opencode project root\n\nProject root instructions\n", + ) + .unwrap(); + fs::write( + aindex_project_dir.join(".github").join("agt.mdx"), + "# Opencode child\n\nChild instructions\n", + ) + .unwrap(); + + fs::write( + aindex_dir.join("commands").join("demo.mdx"), + "---\ndescription: Demo command\nargumentHint: target\nscope: global\n---\nRun demo command\n", + ) + .unwrap(); + fs::write( + aindex_dir.join("commands").join("qa_boot.mdx"), + "---\ndescription: QA boot\nargumentHint: repo\nscope: global\n---\nRun QA boot\n", + ) + .unwrap(); + + fs::write( + aindex_dir.join("subagents").join("demo.mdx"), + "---\ndescription: Demo agent\ncolor: blue\nmodel: gpt-test\nscope: global\n---\nDemo agent instructions\n", + ) + .unwrap(); + fs::write( + aindex_dir.join("subagents").join("qa").join("boot.mdx"), + "---\ndescription: QA boot agent\ncolor: notacolor\nscope: global\n---\nQA boot instructions\n", + ) + .unwrap(); + + fs::write( + aindex_dir.join("rules").join("qa").join("boot.mdx"), + "---\ndescription: QA boot rule\npaths:\n - \"**/*.rs\"\nscope: project\n---\nRule body\n", + ) + .unwrap(); + + let browser_skill_dir = aindex_dir + .join("skills") + .join("browser") + .join("agent-browser"); + fs::create_dir_all(browser_skill_dir.join("references")).unwrap(); + fs::create_dir_all(browser_skill_dir.join("templates")).unwrap(); + fs::create_dir_all(browser_skill_dir.join("assets")).unwrap(); + fs::write( + browser_skill_dir.join("skill.mdx"), + "export default { description: 'Browser skill', name: 'Browser Agent Browser' }\n\n# Browser Skill\n", + ) + .unwrap(); + fs::write( + browser_skill_dir.join("skill.src.mdx"), + "export default { description: 'Browser skill', name: 'Browser Agent Browser' }\n\n# Browser Skill\n", + ) + .unwrap(); + fs::write( + browser_skill_dir.join("references").join("linux-wsl.mdx"), + "---\ndescription: Linux WSL reference\n---\n# Linux WSL\n", + ) + .unwrap(); + fs::write( + browser_skill_dir + .join("references") + .join("linux-wsl.src.mdx"), + "---\ndescription: Linux WSL reference\n---\n# Linux WSL\n", + ) + .unwrap(); + fs::write( + browser_skill_dir + .join("templates") + .join("capture-workflow.sh"), + "#!/usr/bin/env bash\necho capture\n", + ) + .unwrap(); + fs::write( + browser_skill_dir.join("assets").join("logo.png"), + [0x89_u8, 0x50, 0x4E, 0x47, 0x00, 0xFF], + ) + .unwrap(); + fs::write( + browser_skill_dir.join("mcp.json"), + "{\n \"mcpServers\": {\n \"browser\": { \"command\": \"agent-browser\" }\n }\n}\n", + ) + .unwrap(); + + let reverse_skill_dir = aindex_dir + .join("skills") + .join("dev-tools") + .join("reverse-engineering"); + fs::write( + reverse_skill_dir.join("skill.src.mdx"), + "export default { name: 'reverse-engineering', description: 'Reverse engineering skill' }\n\n# Reverse\n", + ) + .unwrap(); + fs::write( + reverse_skill_dir.join("skill.mdx"), + "export default { name: 'reverse-engineering', description: 'Reverse engineering skill' }\n\n# Reverse\n", + ) + .unwrap(); + for name in ["packet-capture", "reverse-tools"] { + fs::write( + reverse_skill_dir.join(format!("{name}.src.mdx")), + format!("---\ndescription: {name}\n---\n# {name}\n"), + ) + .unwrap(); + fs::write( + reverse_skill_dir.join(format!("{name}.mdx")), + format!("---\ndescription: {name}\n---\n# {name}\n"), + ) + .unwrap(); + } + + let plain_skill_dir = aindex_dir.join("skills").join("plain-skill"); + fs::write( + plain_skill_dir.join("skill.mdx"), + "export default { description: 'Plain skill' }\n\n# Plain Skill\n", + ) + .unwrap(); + fs::write( + plain_skill_dir.join("skill.src.mdx"), + "export default { description: 'Plain skill' }\n\n# Plain Skill\n", + ) + .unwrap(); +} diff --git a/cli/local-tests/tests/trae_smoke.rs b/cli/local-tests/tests/trae_smoke.rs index 8145778c..858a30c9 100644 --- a/cli/local-tests/tests/trae_smoke.rs +++ b/cli/local-tests/tests/trae_smoke.rs @@ -1,149 +1,272 @@ -//! 本地裸机 Trae 测试:验证 .trae/steering/GLOBAL.md 正确生成, -//! .trae-cn/ 不被输出,且清理时兼容清理旧的 .trae-cn/。 +//! Isolated Trae smoke tests. +//! +//! 验证 `.trae/steering/GLOBAL.md` 正确生成,`.trae-cn/` 不被输出, +//! 且清理时兼容清理旧的 `.trae-cn/`。 use std::fs; +use std::path::{Path, PathBuf}; use tnmsc_local_tests::LocalTestRunner; -/// Guard test: ensure the compiled tnmsc binary exists. Provides a clear -/// build instruction error if missing. -#[test] -fn binary_exists_before_tests() { - let binary = tnmsc_local_tests::binary_path(); - assert!( - binary.is_file(), - "binary not found at: {}\n\nplease compile it first:\n cargo build -p tnmsc\n", - binary.display() - ); +struct IsolatedTraeFixture { + runner: LocalTestRunner, + temp_home: PathBuf, + project_dir: PathBuf, +} + +impl IsolatedTraeFixture { + fn new() -> Self { + let temp_root = std::env::temp_dir().join(format!( + "tnmsc-local-trae-{}-{}", + std::process::id(), + std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap_or_default() + .as_nanos() + )); + let temp_home = temp_root.join("home"); + let workspace_dir = temp_root.join("workspace"); + let project_dir = workspace_dir.join("memory-sync"); + let aindex_dir = workspace_dir.join("aindex"); + let aindex_project_dir = aindex_dir.join("app").join("memory-sync"); + + fs::create_dir_all(temp_home.join(".aindex")).unwrap(); + fs::create_dir_all(&project_dir).unwrap(); + fs::create_dir_all(project_dir.join("cli")).unwrap(); + fs::create_dir_all(&aindex_project_dir).unwrap(); + fs::create_dir_all(aindex_project_dir.join("cli")).unwrap(); + + // issue local-tests-trae-isolation: trae local tests must validate steering + // output in a temp HOME/workspace instead of the host project tree. + write_trae_config(&temp_home, &workspace_dir); + write_trae_prompt_sources(&aindex_dir, &aindex_project_dir); + + Self { + runner: LocalTestRunner::with_cwd(&project_dir), + temp_home, + project_dir, + } + } + + fn env_home(&self) -> String { + self.temp_home.to_string_lossy().into_owned() + } + + fn run(&self, args: &[&str]) -> tnmsc_local_tests::CommandResult { + let temp_home = self.env_home(); + self + .runner + .run_at_with_env(&self.project_dir, args, &[("HOME", &temp_home)]) + } + + fn clean(&self) -> tnmsc_local_tests::CommandResult { + self.run(&["clean"]) + } + + fn install(&self) -> tnmsc_local_tests::CommandResult { + self.run(&["install"]) + } + + fn steering_path(&self) -> PathBuf { + self + .project_dir + .join(".trae") + .join("steering") + .join("GLOBAL.md") + } + + fn child_steering_path(&self) -> PathBuf { + self + .project_dir + .join("cli") + .join(".trae") + .join("steering") + .join("GLOBAL.md") + } + + fn trae_cn_path(&self) -> PathBuf { + self + .project_dir + .join(".trae-cn") + .join("user_rules") + .join("GLOBAL.md") + } +} + +fn write_trae_config(temp_home: &Path, workspace_dir: &Path) { + fs::write( + temp_home.join(".aindex").join(".tnmsc.json"), + serde_json::json!({ + "workspaceDir": workspace_dir.to_string_lossy(), + "plugins": { + "agentsMd": false, + "git": false, + "readme": false, + "vscode": false, + "zed": false, + "jetbrains": false, + "jetbrainsCodeStyle": false, + "claudeCode": false, + "codex": false, + "cursor": false, + "droid": false, + "gemini": false, + "kiro": false, + "opencode": false, + "qoder": false, + "trae": true, + "traeCn": false, + "warp": false, + "windsurf": false + } + }) + .to_string(), + ) + .unwrap(); +} + +fn write_trae_prompt_sources(aindex_dir: &Path, aindex_project_dir: &Path) { + fs::write( + aindex_dir.join("global.mdx"), + "# Global memory\n\nTrae global memory\n", + ) + .unwrap(); + fs::write( + aindex_dir.join("workspace.mdx"), + "# Workspace memory\n\nTrae workspace memory\n", + ) + .unwrap(); + fs::write( + aindex_dir.join("workspace.src.mdx"), + "# Workspace memory\n\nTrae workspace memory\n", + ) + .unwrap(); + fs::write( + aindex_project_dir.join("agt.mdx"), + "# Trae project root\n\nProject root instructions\n", + ) + .unwrap(); + fs::write( + aindex_project_dir.join("cli").join("agt.mdx"), + "# Trae child\n\nChild instructions\n", + ) + .unwrap(); } -/// Verify that install generates .trae/steering/GLOBAL.md and does NOT generate -/// the deprecated .trae-cn/ path. #[test] fn local_trae_steering_generated_after_install() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); + let fixture = IsolatedTraeFixture::new(); - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); - - let install = runner.install(); - install.assert_success("tnmsc install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before trae install"); + fixture + .install() + .assert_success("isolated tnmsc install for trae"); assert!( - runner.trae_steering_file_exists(), - ".trae/steering/GLOBAL.md should be generated after install, stdout:\n{}\nstderr:\n{}", - install.stdout, - install.stderr + fixture.steering_path().is_file(), + ".trae/steering/GLOBAL.md should be generated after install" + ); + assert!( + fixture.child_steering_path().is_file(), + "child .trae/steering/GLOBAL.md should be generated after install" ); - assert!( - !runner.trae_cn_file_exists(), - ".trae-cn/user_rules/GLOBAL.md must NOT be generated after install" + !fixture.trae_cn_path().is_file(), + ".trae-cn/user_rules/GLOBAL.md must not be generated after install" ); } -/// Verify that two consecutive installs produce identical .trae/steering/GLOBAL.md content. #[test] fn local_trae_steering_idempotent() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); + let fixture = IsolatedTraeFixture::new(); - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); + fixture + .clean() + .assert_success("isolated tnmsc clean before trae install"); - let first = runner.install(); - first.assert_success("first tnmsc install"); - assert!(runner.trae_steering_file_exists()); + let first = fixture.install(); + first.assert_success("first isolated tnmsc install for trae"); + assert!(fixture.steering_path().is_file()); - let content_first = fs::read_to_string( - runner - .cwd() - .join(".trae") - .join("steering") - .join("GLOBAL.md"), - ) - .unwrap(); + let content_first = fs::read_to_string(fixture.steering_path()).unwrap(); - let second = runner.install(); - second.assert_success("second tnmsc install"); - - let content_second = fs::read_to_string( - runner - .cwd() - .join(".trae") - .join("steering") - .join("GLOBAL.md"), - ) - .unwrap(); + let second = fixture.install(); + second.assert_success("second isolated tnmsc install for trae"); + let content_second = fs::read_to_string(fixture.steering_path()).unwrap(); assert_eq!( content_first, content_second, "consecutive installs should produce identical .trae/steering/GLOBAL.md" ); } -/// Verify that `tnmsc clean` removes the generated .trae/steering/GLOBAL.md. #[test] fn local_trae_steering_removed_after_clean() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); + let fixture = IsolatedTraeFixture::new(); - let install = runner.install(); - install.assert_success("tnmsc install"); - assert!(runner.trae_steering_file_exists()); + fixture + .clean() + .assert_success("isolated tnmsc clean before trae install"); + fixture + .install() + .assert_success("isolated tnmsc install for trae"); + assert!(fixture.steering_path().is_file()); - let clean = runner.clean(); - clean.assert_success("tnmsc clean"); + fixture + .clean() + .assert_success("isolated tnmsc clean for trae"); assert!( - !runner.trae_steering_file_exists(), + !fixture.steering_path().is_file(), ".trae/steering/GLOBAL.md should be removed after clean" ); + assert!( + !fixture.child_steering_path().is_file(), + "child .trae/steering/GLOBAL.md should be removed after clean" + ); } -/// Verify backward-compatible cleanup: even if a legacy .trae-cn/ directory exists, -/// `tnmsc clean` removes it along with .trae/. This ensures old installations -/// are properly migrated. #[test] fn local_trae_cn_cleaned_for_compatibility() { - let runner = LocalTestRunner::new(); - runner.assert_project_ready(); - - let clean = runner.clean(); - clean.assert_success("tnmsc clean before install"); - - let install = runner.install(); - install.assert_success("tnmsc install"); - assert!(runner.trae_steering_file_exists()); - - // Simulate old-style .trae-cn/ output (should be cleaned up) - let trae_cn_path = runner - .cwd() - .join(".trae-cn") - .join("user_rules") - .join("GLOBAL.md"); - fs::create_dir_all(trae_cn_path.parent().unwrap()).unwrap(); - fs::write(&trae_cn_path, "# legacy\n").unwrap(); + let fixture = IsolatedTraeFixture::new(); + + fixture + .clean() + .assert_success("isolated tnmsc clean before trae install"); + fixture + .install() + .assert_success("isolated tnmsc install for trae"); + assert!(fixture.steering_path().is_file()); + + fs::create_dir_all(fixture.trae_cn_path().parent().unwrap()).unwrap(); + fs::write(fixture.trae_cn_path(), "# legacy\n").unwrap(); assert!( - runner.trae_cn_file_exists(), + fixture.trae_cn_path().is_file(), "fake .trae-cn should exist before clean" ); - let clean = runner.clean(); - clean.assert_success("tnmsc clean removes legacy .trae-cn"); + fixture + .clean() + .assert_success("isolated tnmsc clean removes legacy .trae-cn"); assert!( - !runner.trae_cn_file_exists(), - "legacy .trae-cn/user_rules/GLOBAL.md should be removed during clean for compatibility" + !fixture.trae_cn_path().is_file(), + "legacy .trae-cn/user_rules/GLOBAL.md should be removed during clean" ); - - // .trae/steering/GLOBAL.md should also be removed assert!( - !runner.trae_steering_file_exists(), + !fixture.steering_path().is_file(), ".trae/steering/GLOBAL.md should also be removed after clean" ); } + +#[test] +fn binary_exists_before_tests() { + let binary = tnmsc_local_tests::binary_path(); + assert!( + binary.is_file(), + "binary not found at: {}\n\nplease compile it first:\n cargo build -p tnmsc\n", + binary.display() + ); +} From bc7e4368d6bd2dca38d41e822c7b80190394a076 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Sun, 3 May 2026 02:21:41 +0800 Subject: [PATCH 44/45] feat: add new dependencies for testing and enhance error reporting in package command --- Cargo.lock | 75 ++++++++++++++++++++++++++++++++++++ Cargo.toml | 4 ++ mcp/Cargo.toml | 4 ++ mcp/src/commands/package.rs | 39 ++++++++++++++++--- mcp/src/main.rs | 14 ++----- mcp/tests/protocol_stdout.rs | 16 ++++++++ 6 files changed, 136 insertions(+), 16 deletions(-) create mode 100644 mcp/tests/protocol_stdout.rs diff --git a/Cargo.lock b/Cargo.lock index 92d42cf7..4c6f4816 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -139,6 +139,21 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d92bec98840b8f03a5ff5413de5293bfcd8bf96467cf5452609f939ec6f5de16" +[[package]] +name = "assert_cmd" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39bae1d3fa576f7c6519514180a72559268dd7d1fe104070956cb687bc6673bd" +dependencies = [ + "anstyle", + "bstr", + "libc", + "predicates", + "predicates-core", + "predicates-tree", + "wait-timeout", +] + [[package]] name = "ast_node" version = "5.0.0" @@ -380,6 +395,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "63044e1ae8e69f3b5a92c736ca6269b8d12fa7efe39bf34ddb06d102cf0e2cab" dependencies = [ "memchr", + "regex-automata", "serde", ] @@ -1135,6 +1151,12 @@ dependencies = [ "syn 2.0.117", ] +[[package]] +name = "difflib" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8" + [[package]] name = "digest" version = "0.10.7" @@ -1425,6 +1447,15 @@ dependencies = [ "miniz_oxide", ] +[[package]] +name = "float-cmp" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b09cf3155332e944990140d967ff5eceb70df778b34f77d8075db46e4704e6d8" +dependencies = [ + "num-traits", +] + [[package]] name = "fnv" version = "1.0.7" @@ -2884,6 +2915,12 @@ dependencies = [ "minimal-lexical", ] +[[package]] +name = "normalize-line-endings" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be" + [[package]] name = "num-bigint" version = "0.4.6" @@ -3468,6 +3505,36 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" +[[package]] +name = "predicates" +version = "3.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ada8f2932f28a27ee7b70dd6c1c39ea0675c55a36879ab92f3a715eaa1e63cfe" +dependencies = [ + "anstyle", + "difflib", + "float-cmp", + "normalize-line-endings", + "predicates-core", + "regex", +] + +[[package]] +name = "predicates-core" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cad38746f3166b4031b1a0d39ad9f954dd291e7854fcc0eed52ee41a0b50d144" + +[[package]] +name = "predicates-tree" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0de1b847b39c8131db0467e9df1ff60e6d0562ab8e9a16e568ad0fdb372e2f2" +dependencies = [ + "predicates-core", + "termtree", +] + [[package]] name = "prettyplease" version = "0.2.37" @@ -5616,6 +5683,12 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "termtree" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f50febec83f5ee1df3015341d8bd429f2d1cc62bcba7ea2076759d315084683" + [[package]] name = "text_lines" version = "0.6.0" @@ -5799,7 +5872,9 @@ dependencies = [ name = "tnmsm" version = "2026.10425.10602" dependencies = [ + "assert_cmd", "clap", + "predicates", "serde_json", "tnmsd", ] diff --git a/Cargo.toml b/Cargo.toml index 1a89cc99..18693f32 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -69,6 +69,10 @@ tauri-plugin-updater = "2" # JSON5 (for GUI log parsing compat) json5 = "1.3.1" +# Tests +assert_cmd = "2.1.1" +predicates = "3.1.3" + [profile.release] strip = true lto = true diff --git a/mcp/Cargo.toml b/mcp/Cargo.toml index 4bee4f77..7b4c3365 100644 --- a/mcp/Cargo.toml +++ b/mcp/Cargo.toml @@ -16,3 +16,7 @@ path = "src/main.rs" tnmsd = { workspace = true } clap = { workspace = true } serde_json = { workspace = true } + +[dev-dependencies] +assert_cmd = { workspace = true } +predicates = { workspace = true } diff --git a/mcp/src/commands/package.rs b/mcp/src/commands/package.rs index 66aba2a9..66824344 100644 --- a/mcp/src/commands/package.rs +++ b/mcp/src/commands/package.rs @@ -1,4 +1,5 @@ use std::fs; +use std::io::{self, Write}; use std::path::{Path, PathBuf}; use std::process::ExitCode; @@ -39,19 +40,22 @@ const PACKAGE_TARGETS: &[PackageTarget] = &[ ]; pub fn execute(args: &AssembleNpmArgs) -> ExitCode { + let mut stderr = io::stderr().lock(); + execute_with_stderr(args, &mut stderr) +} + +fn execute_with_stderr(args: &AssembleNpmArgs, stderr: &mut impl Write) -> ExitCode { match assemble_packages(args) { Ok(copied) => { - // Use stderr: this binary's primary mode is the MCP stdio server, - // and stdout is reserved for JSON-RPC framing. Routing all - // assemble-npm chatter to stderr keeps stdout protocol-safe even - // if the subcommand is ever invoked from a wrapped context. + // Fixes #225: stdout is reserved for MCP JSON-RPC framing, so even + // package-hydration status output must stay off stdout. for path in copied { - eprintln!("Hydrated {}", path.display()); + let _ = writeln!(stderr, "Hydrated {}", path.display()); } ExitCode::SUCCESS } Err(error) => { - eprintln!("Error: {error}"); + let _ = writeln!(stderr, "Error: {error}"); ExitCode::FAILURE } } @@ -187,3 +191,26 @@ fn set_executable_permissions(path: &Path) -> Result<(), String> { fn set_executable_permissions(_path: &Path) -> Result<(), String> { Ok(()) } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn assemble_npm_reports_errors_to_stderr_only() { + let mut stderr = Vec::new(); + let args = AssembleNpmArgs { + artifacts_dir: None, + profile: "missing-profile-for-issue-225".to_string(), + }; + + let exit = execute_with_stderr(&args, &mut stderr); + + assert_eq!(exit, ExitCode::FAILURE); + let stderr = String::from_utf8(stderr).expect("stderr output should be UTF-8"); + assert!( + stderr.contains("Error: Missing local host binary"), + "expected assemble-npm errors to be written to stderr, got: {stderr}" + ); + } +} diff --git a/mcp/src/main.rs b/mcp/src/main.rs index b94d5a34..83e90483 100644 --- a/mcp/src/main.rs +++ b/mcp/src/main.rs @@ -391,23 +391,17 @@ fn main() -> ExitCode { ); let cli = Cli::parse(); - let logger = tnmsd::infra::logger::create_logger("tnmsm", None); match resolve_command(&cli) { ResolvedCommand::Serve => { - let _span = logger.span("server.serve").enter(); - logger.info( - "MCP server started", - Some(json!({ - "serverName": SERVER_NAME, - "protocolVersion": PROTOCOL_VERSION, - })), - ); + // Fixes #225: in stdio mode stdout is the MCP JSON-RPC transport, so + // server startup must not emit logger messages or spans there. run_stdio_server(); ExitCode::SUCCESS } ResolvedCommand::AssembleNpm(args) => { - let _span = logger.span("command.assemble_npm").enter(); + // Fixes #225: keep hidden packaging output off stdout as well; the + // package command writes human-readable status to stderr internally. commands::package::execute(&args) } } diff --git a/mcp/tests/protocol_stdout.rs b/mcp/tests/protocol_stdout.rs new file mode 100644 index 00000000..ba7d3119 --- /dev/null +++ b/mcp/tests/protocol_stdout.rs @@ -0,0 +1,16 @@ +use assert_cmd::Command; + +#[test] +fn assemble_npm_does_not_write_human_output_to_stdout() { + // Fixes #225: stdout belongs to MCP JSON-RPC framing, so human-readable + // package command failures must stay on stderr. + Command::cargo_bin("tnmsm") + .expect("tnmsm binary should be available") + .args(["assemble-npm", "--profile", "missing-profile-for-issue-225"]) + .assert() + .failure() + .stdout("") + .stderr(predicates::str::contains( + "Error: Missing local host binary", + )); +} From 7986e3e3dc386164955f29e529645b33e1b1a587 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Sun, 3 May 2026 12:42:10 +0800 Subject: [PATCH 45/45] chore: bump version to 2026.10502.118 --- Cargo.lock | 14 +++++++------- Cargo.toml | 2 +- cli/npm/darwin-arm64/package.json | 2 +- cli/npm/darwin-x64/package.json | 2 +- cli/npm/linux-arm64-gnu/package.json | 2 +- cli/npm/linux-x64-gnu/package.json | 2 +- cli/npm/win32-x64-msvc/package.json | 2 +- cli/package.json | 12 ++++++------ doc/package.json | 2 +- gui/package.json | 2 +- gui/src-tauri/Cargo.toml | 2 +- gui/src-tauri/tauri.conf.json | 2 +- mcp/npm/darwin-arm64/package.json | 2 +- mcp/npm/darwin-x64/package.json | 2 +- mcp/npm/linux-arm64-gnu/package.json | 2 +- mcp/npm/linux-x64-gnu/package.json | 2 +- mcp/npm/win32-x64-msvc/package.json | 2 +- mcp/package.json | 12 ++++++------ package.json | 2 +- xtask/src/main.rs | 2 +- 20 files changed, 36 insertions(+), 36 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4c6f4816..f9ad32dd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2798,7 +2798,7 @@ dependencies = [ [[package]] name = "memory-sync" -version = "2026.10425.10602" +version = "2026.10502.118" dependencies = [ "tnmsc", ] @@ -5809,7 +5809,7 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tnmsc" -version = "2026.10425.10602" +version = "2026.10502.118" dependencies = [ "clap", "serde_json", @@ -5818,7 +5818,7 @@ dependencies = [ [[package]] name = "tnmsc-local-tests" -version = "2026.10425.10602" +version = "2026.10502.118" dependencies = [ "dirs", "json5", @@ -5827,7 +5827,7 @@ dependencies = [ [[package]] name = "tnmsd" -version = "2026.10425.10602" +version = "2026.10502.118" dependencies = [ "base64 0.22.1", "chrono", @@ -5855,7 +5855,7 @@ dependencies = [ [[package]] name = "tnmsg" -version = "2026.10425.10602" +version = "2026.10502.118" dependencies = [ "dirs", "proptest", @@ -5870,7 +5870,7 @@ dependencies = [ [[package]] name = "tnmsm" -version = "2026.10425.10602" +version = "2026.10502.118" dependencies = [ "assert_cmd", "clap", @@ -7301,7 +7301,7 @@ dependencies = [ [[package]] name = "xtask" -version = "2026.10425.10602" +version = "2026.10502.118" dependencies = [ "clap", "serde", diff --git a/Cargo.toml b/Cargo.toml index 18693f32..8c9ca4d1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -27,7 +27,7 @@ members = [ ] [workspace.package] -version = "2026.10425.10602" +version = "2026.10502.118" edition = "2024" rust-version = "1.88" license = "AGPL-3.0-only" diff --git a/cli/npm/darwin-arm64/package.json b/cli/npm/darwin-arm64/package.json index 9feb1685..5a58538c 100644 --- a/cli/npm/darwin-arm64/package.json +++ b/cli/npm/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-darwin-arm64", - "version": "2026.10425.10602", + "version": "2026.10502.118", "description": "tnmsc native binary for macOS arm64", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/cli/npm/darwin-x64/package.json b/cli/npm/darwin-x64/package.json index 955d7ffd..9ec71d66 100644 --- a/cli/npm/darwin-x64/package.json +++ b/cli/npm/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-darwin-x64", - "version": "2026.10425.10602", + "version": "2026.10502.118", "description": "tnmsc native binary for macOS x64", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/cli/npm/linux-arm64-gnu/package.json b/cli/npm/linux-arm64-gnu/package.json index 57c55833..8ee88e86 100644 --- a/cli/npm/linux-arm64-gnu/package.json +++ b/cli/npm/linux-arm64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-linux-arm64-gnu", - "version": "2026.10425.10602", + "version": "2026.10502.118", "description": "tnmsc native binary for Linux arm64 (glibc)", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/cli/npm/linux-x64-gnu/package.json b/cli/npm/linux-x64-gnu/package.json index a58374c9..6445bd76 100644 --- a/cli/npm/linux-x64-gnu/package.json +++ b/cli/npm/linux-x64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-linux-x64-gnu", - "version": "2026.10425.10602", + "version": "2026.10502.118", "description": "tnmsc native binary for Linux x64 (glibc)", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/cli/npm/win32-x64-msvc/package.json b/cli/npm/win32-x64-msvc/package.json index 61f75553..d443db25 100644 --- a/cli/npm/win32-x64-msvc/package.json +++ b/cli/npm/win32-x64-msvc/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-win32-x64-msvc", - "version": "2026.10425.10602", + "version": "2026.10502.118", "description": "tnmsc native binary for Windows x64", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/cli/package.json b/cli/package.json index 707ebb5d..8599accf 100644 --- a/cli/package.json +++ b/cli/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli", - "version": "2026.10425.10602", + "version": "2026.10502.118", "description": "TrueNine Memory Synchronization CLI metadata package", "author": "TrueNine", "license": "AGPL-3.0-only", @@ -34,10 +34,10 @@ "test": "cargo test --manifest-path Cargo.toml" }, "optionalDependencies": { - "@truenine/memory-sync-cli-darwin-arm64": "2026.10425.10602", - "@truenine/memory-sync-cli-darwin-x64": "2026.10425.10602", - "@truenine/memory-sync-cli-linux-arm64-gnu": "2026.10425.10602", - "@truenine/memory-sync-cli-linux-x64-gnu": "2026.10425.10602", - "@truenine/memory-sync-cli-win32-x64-msvc": "2026.10425.10602" + "@truenine/memory-sync-cli-darwin-arm64": "2026.10502.118", + "@truenine/memory-sync-cli-darwin-x64": "2026.10502.118", + "@truenine/memory-sync-cli-linux-arm64-gnu": "2026.10502.118", + "@truenine/memory-sync-cli-linux-x64-gnu": "2026.10502.118", + "@truenine/memory-sync-cli-win32-x64-msvc": "2026.10502.118" } } diff --git a/doc/package.json b/doc/package.json index 34eb0d92..8c135522 100644 --- a/doc/package.json +++ b/doc/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-docs", - "version": "2026.10425.10602", + "version": "2026.10502.118", "private": true, "packageManager": "pnpm@10.33.0", "description": "Chinese-first manifesto-led documentation site for @truenine/memory-sync.", diff --git a/gui/package.json b/gui/package.json index 9d68fdc2..16d4dc7d 100644 --- a/gui/package.json +++ b/gui/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-gui", - "version": "2026.10425.10602", + "version": "2026.10502.118", "private": true, "engines": { "node": ">= 22" diff --git a/gui/src-tauri/Cargo.toml b/gui/src-tauri/Cargo.toml index 8c683271..78256b5b 100644 --- a/gui/src-tauri/Cargo.toml +++ b/gui/src-tauri/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "tnmsg" -version = "2026.10425.10602" +version = "2026.10502.118" description = "Memory Sync desktop GUI application" authors.workspace = true edition.workspace = true diff --git a/gui/src-tauri/tauri.conf.json b/gui/src-tauri/tauri.conf.json index fae937f9..b1762a5a 100644 --- a/gui/src-tauri/tauri.conf.json +++ b/gui/src-tauri/tauri.conf.json @@ -1,6 +1,6 @@ { "$schema": "https://schema.tauri.app/config/2", - "version": "2026.10425.10602", + "version": "2026.10502.118", "productName": "Memory Sync", "identifier": "org.truenine.memory-sync", "build": { diff --git a/mcp/npm/darwin-arm64/package.json b/mcp/npm/darwin-arm64/package.json index 9f74a551..9a589dd9 100644 --- a/mcp/npm/darwin-arm64/package.json +++ b/mcp/npm/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-mcp-darwin-arm64", - "version": "2026.10425.10602", + "version": "2026.10502.118", "description": "tnmsm native binary for macOS arm64", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/mcp/npm/darwin-x64/package.json b/mcp/npm/darwin-x64/package.json index 36719faf..06913d8a 100644 --- a/mcp/npm/darwin-x64/package.json +++ b/mcp/npm/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-mcp-darwin-x64", - "version": "2026.10425.10602", + "version": "2026.10502.118", "description": "tnmsm native binary for macOS x64", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/mcp/npm/linux-arm64-gnu/package.json b/mcp/npm/linux-arm64-gnu/package.json index 36a4fd49..c4f60c36 100644 --- a/mcp/npm/linux-arm64-gnu/package.json +++ b/mcp/npm/linux-arm64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-mcp-linux-arm64-gnu", - "version": "2026.10425.10602", + "version": "2026.10502.118", "description": "tnmsm native binary for Linux arm64 (glibc)", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/mcp/npm/linux-x64-gnu/package.json b/mcp/npm/linux-x64-gnu/package.json index 73a118ff..daa85c24 100644 --- a/mcp/npm/linux-x64-gnu/package.json +++ b/mcp/npm/linux-x64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-mcp-linux-x64-gnu", - "version": "2026.10425.10602", + "version": "2026.10502.118", "description": "tnmsm native binary for Linux x64 (glibc)", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/mcp/npm/win32-x64-msvc/package.json b/mcp/npm/win32-x64-msvc/package.json index 089c4834..31810fbd 100644 --- a/mcp/npm/win32-x64-msvc/package.json +++ b/mcp/npm/win32-x64-msvc/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-mcp-win32-x64-msvc", - "version": "2026.10425.10602", + "version": "2026.10502.118", "description": "tnmsm native binary for Windows x64", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/mcp/package.json b/mcp/package.json index c9c34ae9..5223f3ca 100644 --- a/mcp/package.json +++ b/mcp/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-mcp", - "version": "2026.10425.10602", + "version": "2026.10502.118", "description": "TrueNine Memory Sync MCP metadata package", "author": "TrueNine", "license": "AGPL-3.0-only", @@ -32,10 +32,10 @@ "test": "cargo test --manifest-path Cargo.toml" }, "optionalDependencies": { - "@truenine/memory-sync-mcp-darwin-arm64": "2026.10425.10602", - "@truenine/memory-sync-mcp-darwin-x64": "2026.10425.10602", - "@truenine/memory-sync-mcp-linux-arm64-gnu": "2026.10425.10602", - "@truenine/memory-sync-mcp-linux-x64-gnu": "2026.10425.10602", - "@truenine/memory-sync-mcp-win32-x64-msvc": "2026.10425.10602" + "@truenine/memory-sync-mcp-darwin-arm64": "2026.10502.118", + "@truenine/memory-sync-mcp-darwin-x64": "2026.10502.118", + "@truenine/memory-sync-mcp-linux-arm64-gnu": "2026.10502.118", + "@truenine/memory-sync-mcp-linux-x64-gnu": "2026.10502.118", + "@truenine/memory-sync-mcp-win32-x64-msvc": "2026.10502.118" } } diff --git a/package.json b/package.json index 7d03cdf9..3f8040d4 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync", - "version": "2026.10425.10602", + "version": "2026.10502.118", "description": "Cross-AI-tool prompt synchronisation toolkit (CLI + Tauri desktop GUI) — one ruleset, multi-target adaptation. Monorepo powered by pnpm + Turbo.", "license": "AGPL-3.0-only", "keywords": [ diff --git a/xtask/src/main.rs b/xtask/src/main.rs index e06c093a..a4fa4d51 100644 --- a/xtask/src/main.rs +++ b/xtask/src/main.rs @@ -3,7 +3,7 @@ use std::process::{Command as ProcCommand, Stdio}; #[derive(Parser)] #[command(name = "memory-sync-xtask")] -#[command(version = "2026.10422.10749")] +#[command(version = env!("CARGO_PKG_VERSION"))] #[command(about = "Unified build entry for memory-sync workspace")] struct Cli { #[command(subcommand)]