From 35d03ecd2d03ad909f6d34d003d570db83462e7f Mon Sep 17 00:00:00 2001 From: TrueNine Date: Fri, 24 Apr 2026 19:28:28 +0800 Subject: [PATCH 1/3] fix: build release binaries before packaging smoke tests in CI --- .github/workflows/ci.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8e281253..6b30e439 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -89,6 +89,9 @@ jobs: with: cache-key: ci-packaging-smoke + - name: Build release binaries (for packaging smoke) + run: cargo build --release -p tnmsc -p tnmsm + - name: CLI packaging smoke run: cargo test -p tnmsc-integrate-tests packaging_smoke_covers_release_binary_and_global_install -- --exact --nocapture From 211066da6f5dc3c9c92f490be72cca82a4fb8c92 Mon Sep 17 00:00:00 2001 From: TrueNine Date: Sat, 25 Apr 2026 09:48:44 +0800 Subject: [PATCH 2/3] refactor: extract shared code to sdk, fix local test framework bugs - Move strip_unc_prefix, build_global_scope, collect_context to sdk/src/services/common.rs - Restructure logger into sdk/src/infra/logger/ module (core, diagnostic, formatter, sink) - Rename command_diagnostics.rs -> command_diagnostics_service.rs, prompts.rs -> prompt_service.rs - Add OutputContext, CleanupSnapshot domain types - Extract git_fs module from git_discovery - Pass &Logger to collect_context and build_output_files - Fix PoisonError cascade in local tests: use unwrap_or_else for Mutex - Add cross-process file lock to prevent test binary interference - Add clean-before-install in 3 tests that lacked it - Fix dry_run encoding: use file.encoding.clone() instead of None - Remove unused mut, functions, fields (compiler warnings) - Add logging test files for observability coverage --- cli/local-tests/src/lib.rs | 69 +- cli/local-tests/tests/claude_smoke.rs | 3 + cli/local-tests/tests/clean_blackbox.rs | 8 + cli/local-tests/tests/logging_clean.rs | 55 + cli/local-tests/tests/logging_dry_run.rs | 52 + 
.../tests/logging_error_feedback.rs | 63 + .../tests/logging_install_observability.rs | 99 ++ cli/local-tests/tests/logging_levels.rs | 106 ++ cli/src/cli.rs | 20 - cli/src/commands/pipeline.rs | 50 +- cli/src/lib.rs | 12 +- cli/src/logger.rs | 74 -- mcp/src/main.rs | 22 +- sdk/src/context/mod.rs | 4 +- sdk/src/domain/base_output_plans.rs | 6 +- sdk/src/domain/cleanup.rs | 141 ++ sdk/src/domain/config/mod.rs | 10 +- sdk/src/domain/mod.rs | 7 + sdk/src/domain/output_context.rs | 53 + .../output_plans/claude_code_output_plan.rs | 4 +- .../domain/output_plans/codex_output_plan.rs | 13 +- .../domain/output_plans/cursor_output_plan.rs | 4 +- .../domain/output_plans/droid_output_plan.rs | 6 +- .../domain/output_plans/gemini_output_plan.rs | 4 +- .../generic_skills_output_plan.rs | 4 +- ...etbrains_ai_assistant_codex_output_plan.rs | 4 +- .../domain/output_plans/kiro_output_plan.rs | 4 +- .../output_plans/opencode_output_plan.rs | 4 +- .../domain/output_plans/qoder_output_plan.rs | 4 +- .../domain/output_plans/trae_output_plan.rs | 4 +- .../domain/output_plans/warp_output_plan.rs | 4 +- .../output_plans/windsurf_output_plan.rs | 4 +- sdk/src/infra/git_fs.rs | 196 +++ sdk/src/infra/logger.rs | 1175 ----------------- sdk/src/infra/logger/core.rs | 263 ++++ sdk/src/infra/logger/diagnostic.rs | 217 +++ sdk/src/infra/logger/formatter.rs | 228 ++++ sdk/src/infra/logger/mod.rs | 305 +++++ sdk/src/infra/logger/sink.rs | 133 ++ sdk/src/infra/mod.rs | 2 + sdk/src/infra/script_runtime.rs | 8 +- sdk/src/lib.rs | 2 +- sdk/src/policy/cleanup.rs | 208 +-- sdk/src/policy/git_discovery.rs | 193 +-- sdk/src/policy/path_blocking.rs | 7 +- sdk/src/services/clean_service.rs | 745 ++--------- ...tics.rs => command_diagnostics_service.rs} | 12 +- sdk/src/services/common.rs | 521 ++++++++ sdk/src/services/dry_run_service.rs | 559 ++------ sdk/src/services/install_service.rs | 605 ++------- sdk/src/services/mod.rs | 5 +- .../{prompts.rs => prompt_service.rs} | 23 +- 52 files changed, 3022 
insertions(+), 3302 deletions(-) create mode 100644 cli/local-tests/tests/logging_clean.rs create mode 100644 cli/local-tests/tests/logging_dry_run.rs create mode 100644 cli/local-tests/tests/logging_error_feedback.rs create mode 100644 cli/local-tests/tests/logging_install_observability.rs create mode 100644 cli/local-tests/tests/logging_levels.rs delete mode 100644 cli/src/logger.rs create mode 100644 sdk/src/domain/cleanup.rs create mode 100644 sdk/src/domain/output_context.rs create mode 100644 sdk/src/infra/git_fs.rs delete mode 100644 sdk/src/infra/logger.rs create mode 100644 sdk/src/infra/logger/core.rs create mode 100644 sdk/src/infra/logger/diagnostic.rs create mode 100644 sdk/src/infra/logger/formatter.rs create mode 100644 sdk/src/infra/logger/mod.rs create mode 100644 sdk/src/infra/logger/sink.rs rename sdk/src/services/{command_diagnostics.rs => command_diagnostics_service.rs} (92%) create mode 100644 sdk/src/services/common.rs rename sdk/src/services/{prompts.rs => prompt_service.rs} (97%) diff --git a/cli/local-tests/src/lib.rs b/cli/local-tests/src/lib.rs index 0bc9b510..f795b84a 100644 --- a/cli/local-tests/src/lib.rs +++ b/cli/local-tests/src/lib.rs @@ -4,6 +4,7 @@ use std::fs; use std::path::{Path, PathBuf}; use std::process::{Command, Output}; use std::sync::{Mutex, OnceLock}; +use std::time::Duration; static BINARY_BUILT: OnceLock<()> = OnceLock::new(); static PROJECT_LOCK: OnceLock> = OnceLock::new(); @@ -38,6 +39,7 @@ pub struct LocalTestRunner { binary: PathBuf, cwd: PathBuf, _lock_guard: std::sync::MutexGuard<'static, ()>, + _file_lock: CrossProcessLock, } impl LocalTestRunner { @@ -45,11 +47,13 @@ impl LocalTestRunner { /// 若该目录不存在,则回退到当前目录。 pub fn new() -> Self { ensure_binary(); - // 所有测试共享同一个真实项目目录,必须串行执行 + // Cross-process lock: serialises test binaries sharing the same project + let file_lock = acquire_cross_process_lock(); + // In-process lock: serialises tests within a single binary let guard = PROJECT_LOCK .get_or_init(|| 
Mutex::new(())) .lock() - .expect("project lock should not be poisoned"); + .unwrap_or_else(|e| e.into_inner()); let default_project = home_dir().join("workspace").join("memory-sync"); let cwd = if default_project.is_dir() { default_project @@ -60,15 +64,17 @@ impl LocalTestRunner { binary: binary_path(), cwd, _lock_guard: guard, + _file_lock: file_lock, } } pub fn with_cwd(cwd: impl AsRef) -> Self { ensure_binary(); + let file_lock = acquire_cross_process_lock(); let guard = PROJECT_LOCK .get_or_init(|| Mutex::new(())) .lock() - .expect("project lock should not be poisoned"); + .unwrap_or_else(|e| e.into_inner()); let cwd = cwd.as_ref().to_path_buf(); assert!( cwd.is_dir(), @@ -79,6 +85,7 @@ impl LocalTestRunner { binary: binary_path(), cwd, _lock_guard: guard, + _file_lock: file_lock, } } @@ -126,6 +133,21 @@ impl LocalTestRunner { command_output(&mut cmd, &format!("tnmsc {}", args.join(" "))) } + /// 在指定目录下运行 tnmsc 命令,并设置额外环境变量。 + pub fn run_at_with_env( + &self, + cwd: impl AsRef, + args: &[&str], + envs: &[(&str, &str)], + ) -> CommandResult { + let mut cmd = Command::new(&self.binary); + cmd.args(args).current_dir(cwd.as_ref()); + for (k, v) in envs { + cmd.env(k, v); + } + command_output(&mut cmd, &format!("tnmsc {}", args.join(" "))) + } + pub fn run_success(&self, args: &[&str]) -> CommandResult { let result = self.run(args); result.assert_success(&format!("tnmsc {}", args.join(" "))); @@ -341,6 +363,47 @@ impl LocalTestRunner { } } +// --------------------------------------------------------------------------- +// Cross-process file lock — prevents test binaries from interfering with each +// other when running local tests on the shared project directory. 
+// --------------------------------------------------------------------------- + +pub struct CrossProcessLock(Option); + +impl Drop for CrossProcessLock { + fn drop(&mut self) { + if let Some(path) = self.0.take() { + let _ = std::fs::remove_file(&path); + } + } +} + +fn acquire_cross_process_lock() -> CrossProcessLock { + let lock_path = home_dir().join(".tnmsc_local_test_lock"); + loop { + match std::fs::File::create_new(&lock_path) { + Ok(_) => return CrossProcessLock(Some(lock_path)), + Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => { + // Stale-lock detection: if older than 5 minutes, remove and retry + if let Ok(meta) = std::fs::metadata(&lock_path) { + if let Ok(created) = meta.created() { + if let Ok(elapsed) = created.elapsed() { + if elapsed > Duration::from_secs(300) { + let _ = std::fs::remove_file(&lock_path); + continue; + } + } + } + } + std::thread::sleep(Duration::from_millis(200)); + } + Err(_) => { + std::thread::sleep(Duration::from_millis(200)); + } + } + } +} + pub fn ensure_binary() { let binary = binary_path(); diff --git a/cli/local-tests/tests/claude_smoke.rs b/cli/local-tests/tests/claude_smoke.rs index 30b92555..202b7c55 100644 --- a/cli/local-tests/tests/claude_smoke.rs +++ b/cli/local-tests/tests/claude_smoke.rs @@ -101,6 +101,9 @@ fn local_claude_clean_removes_all_project_files() { let runner = LocalTestRunner::new(); runner.assert_project_ready(); + let clean = runner.clean(); + clean.assert_success("tnmsc clean before install"); + let install = runner.install(); install.assert_success("tnmsc install before clean"); diff --git a/cli/local-tests/tests/clean_blackbox.rs b/cli/local-tests/tests/clean_blackbox.rs index c5897eb8..78c814a5 100644 --- a/cli/local-tests/tests/clean_blackbox.rs +++ b/cli/local-tests/tests/clean_blackbox.rs @@ -22,6 +22,10 @@ fn local_clean_removes_project_claude_md() { let runner = LocalTestRunner::new(); runner.assert_project_ready(); + // 先 clean 再 install 确保可复现 + let clean = runner.clean(); + 
clean.assert_success("tnmsc clean before install"); + // 先 install 生成文件 let install = runner.install(); install.assert_success("tnmsc install before clean"); @@ -45,6 +49,10 @@ fn local_clean_dry_run_does_not_remove_files() { let runner = LocalTestRunner::new(); runner.assert_project_ready(); + // 先 clean 再 install 确保可复现 + let clean = runner.clean(); + clean.assert_success("tnmsc clean before install"); + // 先 install 生成文件 let install = runner.install(); install.assert_success("tnmsc install before dry-run clean"); diff --git a/cli/local-tests/tests/logging_clean.rs b/cli/local-tests/tests/logging_clean.rs new file mode 100644 index 00000000..001402dc --- /dev/null +++ b/cli/local-tests/tests/logging_clean.rs @@ -0,0 +1,55 @@ +//! Clean 可观测性测试:验证 clean 命令输出足够的可观测信息。 + +use tnmsc_local_tests::LocalTestRunner; + +#[test] +fn clean_outputs_key_spans_and_events() { + let runner = LocalTestRunner::new(); + runner.assert_project_ready(); + + // 先 install 生成文件,再 clean + let install = runner.install(); + install.assert_success("tnmsc install before clean"); + + let result = runner.run(&["--trace", "clean"]); + result.assert_success("tnmsc --trace clean"); + + // 验证顶层事件 + assert!( + result.stdout.contains("### Running clean"), + "clean should output 'Running clean'. stdout:\n{}", + result.stdout + ); + + // 验证主要 Span + assert!( + result.stdout.contains("### cleanup.discover started"), + "clean should output 'cleanup.discover' span. stdout:\n{}", + result.stdout + ); + assert!( + result.stdout.contains("### cleanup.execute started"), + "clean should output 'cleanup.execute' span. 
stdout:\n{}", + result.stdout + ); +} + +#[test] +fn clean_outputs_deletion_summary() { + let runner = LocalTestRunner::new(); + runner.assert_project_ready(); + + // 先 install 生成文件,再 clean + let install = runner.install(); + install.assert_success("tnmsc install before clean"); + + let result = runner.run(&["--info", "clean"]); + result.assert_success("tnmsc --info clean"); + + // Info 级别应该输出删除摘要 + assert!( + result.stdout.contains("Deleted") || result.stdout.contains("No files needed updates"), + "clean should output deletion summary. stdout:\n{}", + result.stdout + ); +} diff --git a/cli/local-tests/tests/logging_dry_run.rs b/cli/local-tests/tests/logging_dry_run.rs new file mode 100644 index 00000000..6e1c1b17 --- /dev/null +++ b/cli/local-tests/tests/logging_dry_run.rs @@ -0,0 +1,52 @@ +//! Dry-run 可观测性测试:验证 dry-run 命令输出足够的可观测信息。 + +use tnmsc_local_tests::LocalTestRunner; + +#[test] +fn dry_run_outputs_key_spans_and_events() { + let runner = LocalTestRunner::new(); + runner.assert_project_ready(); + + let result = runner.run(&["--trace", "dry-run"]); + result.assert_success("tnmsc --trace dry-run"); + + // 验证顶层事件 + assert!( + result.stdout.contains("### Running dry-run"), + "dry-run should output 'Running dry-run'. stdout:\n{}", + result.stdout + ); + + // 验证主要 Span + assert!( + result.stdout.contains("### config.load started"), + "dry-run should output 'config.load' span. stdout:\n{}", + result.stdout + ); + assert!( + result.stdout.contains("### context.collect started"), + "dry-run should output 'context.collect' span. stdout:\n{}", + result.stdout + ); + assert!( + result.stdout.contains("### output.build started"), + "dry-run should output 'output.build' span. 
stdout:\n{}", + result.stdout + ); +} + +#[test] +fn dry_run_outputs_plan_preview() { + let runner = LocalTestRunner::new(); + runner.assert_project_ready(); + + let result = runner.run(&["--info", "dry-run"]); + result.assert_success("tnmsc --info dry-run"); + + // Info 级别应该输出计划摘要 + assert!( + result.stdout.contains("Planned") || result.stdout.contains("No files needed updates"), + "dry-run should output plan summary. stdout:\n{}", + result.stdout + ); +} diff --git a/cli/local-tests/tests/logging_error_feedback.rs b/cli/local-tests/tests/logging_error_feedback.rs new file mode 100644 index 00000000..952bd969 --- /dev/null +++ b/cli/local-tests/tests/logging_error_feedback.rs @@ -0,0 +1,63 @@ +//! 错误反馈测试:验证错误时输出结构化诊断信息。 + +use std::fs; +use tnmsc_local_tests::LocalTestRunner; + +fn run_without_global_config( + runner: &LocalTestRunner, + args: &[&str], +) -> tnmsc_local_tests::CommandResult { + let temp_home = std::env::temp_dir().join("tnmsc_test_home"); + let _ = fs::remove_dir_all(&temp_home); + fs::create_dir_all(&temp_home).unwrap(); + // Point TNMSC_CONFIG_PATH to a non-existent file so global config is not found. + let fake_config = temp_home.join(".tnmsc.json"); + runner.run_at_with_env( + std::env::temp_dir(), + args, + &[("TNMSC_CONFIG_PATH", fake_config.to_str().unwrap())], + ) +} + +#[test] +fn missing_config_outputs_diagnostic_with_fix() { + let runner = LocalTestRunner::new(); + // 在临时目录运行(没有 .tnmsc.json),并隔离全局配置 + let result = run_without_global_config(&runner, &["install"]); + result.assert_failure("install without config"); + + // 验证诊断结构存在 + assert!( + result.stderr.contains("What happened") || result.stdout.contains("What happened"), + "error should contain 'What happened' section. stdout:\n{}\nstderr:\n{}", + result.stdout, result.stderr + ); + + // 验证有修复建议(嵌入在错误消息中) + assert!( + result.stderr.contains("Please create it") || result.stdout.contains("Please create it"), + "error should contain fix suggestion. 
stdout:\n{}\nstderr:\n{}", + result.stdout, result.stderr + ); + + // 验证提及配置文件 + assert!( + result.stderr.contains(".tnmsc.json") || result.stdout.contains(".tnmsc.json"), + "error should mention .tnmsc.json. stdout:\n{}\nstderr:\n{}", + result.stdout, result.stderr + ); +} + +#[test] +fn missing_config_at_error_level_shows_diagnostic() { + let runner = LocalTestRunner::new(); + let result = run_without_global_config(&runner, &["--error", "install"]); + result.assert_failure("install without config at error level"); + + // Error 级别也应该显示诊断 + assert!( + result.stderr.contains("What happened"), + "--error should still show diagnostic. stderr:\n{}", + result.stderr + ); +} diff --git a/cli/local-tests/tests/logging_install_observability.rs b/cli/local-tests/tests/logging_install_observability.rs new file mode 100644 index 00000000..1993c75f --- /dev/null +++ b/cli/local-tests/tests/logging_install_observability.rs @@ -0,0 +1,99 @@ +//! Install 可观测性测试:验证 install 命令输出足够的可观测信息。 + +use tnmsc_local_tests::LocalTestRunner; + +#[test] +fn install_outputs_key_spans_and_events() { + let runner = LocalTestRunner::new(); + runner.assert_project_ready(); + + let clean = runner.clean(); + clean.assert_success("tnmsc clean before install"); + + let result = runner.run(&["--trace", "install"]); + result.assert_success("tnmsc --trace install"); + + // 验证顶层事件 + assert!( + result.stdout.contains("### Install started"), + "install should output 'Install started'. stdout:\n{}", + result.stdout + ); + assert!( + result.stdout.contains("### Install completed"), + "install should output 'Install completed'. stdout:\n{}", + result.stdout + ); + + // 验证主要 Span + assert!( + result.stdout.contains("### config.load started"), + "install should output 'config.load' span. stdout:\n{}", + result.stdout + ); + assert!( + result.stdout.contains("### context.collect started"), + "install should output 'context.collect' span. 
stdout:\n{}", + result.stdout + ); + assert!( + result.stdout.contains("### output.build started"), + "install should output 'output.build' span. stdout:\n{}", + result.stdout + ); + assert!( + result.stdout.contains("### files.write started"), + "install should output 'files.write' span. stdout:\n{}", + result.stdout + ); + + // 验证 collector span + assert!( + result.stdout.contains("### collect.aindex_resolvers started"), + "install should output 'collect.aindex_resolvers' span. stdout:\n{}", + result.stdout + ); + assert!( + result.stdout.contains("### collect.project_prompt started"), + "install should output 'collect.project_prompt' span. stdout:\n{}", + result.stdout + ); +} + +#[test] +fn install_outputs_plugin_resolution() { + let runner = LocalTestRunner::new(); + runner.assert_project_ready(); + + let clean = runner.clean(); + clean.assert_success("tnmsc clean"); + + let result = runner.run(&["--info", "install"]); + result.assert_success("tnmsc --info install"); + + // 验证插件解析信息 + assert!( + result.stdout.contains("Plugins resolved"), + "install should output plugin resolution. stdout:\n{}", + result.stdout + ); +} + +#[test] +fn install_outputs_file_write_events() { + let runner = LocalTestRunner::new(); + runner.assert_project_ready(); + + let clean = runner.clean(); + clean.assert_success("tnmsc clean"); + + let result = runner.run(&["--debug", "install"]); + result.assert_success("tnmsc --debug install"); + + // 验证文件写入事件(应该有文件被写入) + assert!( + result.stdout.contains("file.written") || result.stdout.contains("file.skipped"), + "install should output file write events. stdout:\n{}", + result.stdout + ); +} diff --git a/cli/local-tests/tests/logging_levels.rs b/cli/local-tests/tests/logging_levels.rs new file mode 100644 index 00000000..b04ba3dc --- /dev/null +++ b/cli/local-tests/tests/logging_levels.rs @@ -0,0 +1,106 @@ +//! 
日志级别测试:验证不同日志级别下的输出行为。 + +use tnmsc_local_tests::LocalTestRunner; + +#[test] +fn trace_level_outputs_span_events() { + let runner = LocalTestRunner::new(); + runner.assert_project_ready(); + + // clean 后 install,确保有文件写入操作 + let clean = runner.clean(); + clean.assert_success("tnmsc clean"); + + let result = runner.run(&["--trace", "install"]); + result.assert_success("tnmsc --trace install"); + + // Trace 级别应该输出 collector span + assert!( + result.stdout.contains("### collect.aindex_resolvers started"), + "--trace should output collector spans. stdout:\n{}", + result.stdout + ); + assert!( + result.stdout.contains("### config.load started"), + "--trace should output config span. stdout:\n{}", + result.stdout + ); +} + +#[test] +fn info_level_outputs_top_level_events() { + let runner = LocalTestRunner::new(); + runner.assert_project_ready(); + + let clean = runner.clean(); + clean.assert_success("tnmsc clean"); + + let result = runner.install(); // 默认 info 级别 + result.assert_success("tnmsc install"); + + // Info 级别应该输出顶层事件 + assert!( + result.stdout.contains("### Install started"), + "default level should output 'Install started'. stdout:\n{}", + result.stdout + ); + assert!( + result.stdout.contains("### Install completed"), + "default level should output 'Install completed'. 
stdout:\n{}", + result.stdout + ); +} + +#[test] +fn error_level_only_outputs_errors() { + let runner = LocalTestRunner::new(); + // 在一个没有 config 的目录运行,并隔离全局配置,触发错误 + let temp_home = std::env::temp_dir().join("tnmsc_test_home"); + let _ = std::fs::remove_dir_all(&temp_home); + std::fs::create_dir_all(&temp_home).unwrap(); + let fake_config = temp_home.join(".tnmsc.json"); + let result = runner.run_at_with_env( + std::env::temp_dir(), + &["--error", "install"], + &[("TNMSC_CONFIG_PATH", fake_config.to_str().unwrap())], + ); + result.assert_failure("tnmsc --error install without config"); + + // Error 级别不应该输出 info 事件 + assert!( + !result.stdout.contains("### Install started"), + "--error should not output info events. stdout:\n{}", + result.stdout + ); + + // 但应该输出错误诊断 + assert!( + result.stderr.contains("What happened") || result.stderr.contains("error"), + "--error should output error diagnostics. stderr:\n{}", + result.stderr + ); +} + +#[test] +fn debug_level_outputs_debug_events() { + let runner = LocalTestRunner::new(); + runner.assert_project_ready(); + + let clean = runner.clean(); + clean.assert_success("tnmsc clean"); + + let result = runner.run(&["--debug", "install"]); + result.assert_success("tnmsc --debug install"); + + // Debug 级别应该输出更多上下文 + assert!( + result.stdout.contains("### Context collected"), + "--debug should output 'Context collected'. stdout:\n{}", + result.stdout + ); + assert!( + result.stdout.contains("### Output files built"), + "--debug should output 'Output files built'. 
stdout:\n{}", + result.stdout + ); +} diff --git a/cli/src/cli.rs b/cli/src/cli.rs index 02020fdd..0ab42be5 100644 --- a/cli/src/cli.rs +++ b/cli/src/cli.rs @@ -6,8 +6,6 @@ use std::path::PathBuf; use clap::{Args, Parser, Subcommand}; -use crate::logger::LogLevel; - /// Cross-AI-tool prompt synchronisation CLI #[derive(Parser, Debug)] #[command( @@ -115,25 +113,7 @@ impl ResolvedLogLevel { } } - pub fn to_logger_level(self) -> LogLevel { - match self { - Self::Trace => LogLevel::Trace, - Self::Debug => LogLevel::Debug, - Self::Info => LogLevel::Info, - Self::Warn => LogLevel::Warn, - Self::Error => LogLevel::Error, - } - } - pub fn to_sdk_logger_level(self) -> tnmsd::infra::logger::LogLevel { - match self { - Self::Trace => tnmsd::infra::logger::LogLevel::Trace, - Self::Debug => tnmsd::infra::logger::LogLevel::Debug, - Self::Info => tnmsd::infra::logger::LogLevel::Info, - Self::Warn => tnmsd::infra::logger::LogLevel::Warn, - Self::Error => tnmsd::infra::logger::LogLevel::Error, - } - } } /// Resolve log level from CLI flags. 
diff --git a/cli/src/commands/pipeline.rs b/cli/src/commands/pipeline.rs index 824719c7..af282096 100644 --- a/cli/src/commands/pipeline.rs +++ b/cli/src/commands/pipeline.rs @@ -1,8 +1,8 @@ use std::process::ExitCode; -use serde_json::Value; +use serde_json::{Value, json}; -use crate::logger; +use tnmsd::infra::logger::{Logger, create_logger, flush}; #[derive(Debug, PartialEq, Eq)] struct RenderedCommandResult { @@ -155,30 +155,44 @@ fn render_entry(label: &str, value: &Value) -> Vec { } } -fn log_command_start(command_name: &str) { - logger::info(&format!("Running {command_name}")); +fn log_command_start(logger: &Logger, command_name: &str) { + logger.info(format!("Running {command_name}"), None); if let Ok(current_dir) = std::env::current_dir() { - logger::debug(&format!("currentDir={}", current_dir.display())); + logger.debug( + "currentDir", + Some(json!({ "currentDir": current_dir.display().to_string() })), + ); } } fn log_command_finish( + logger: &Logger, command_name: &str, result: &Result, ) { match result { Ok(command_result) => { - logger::debug(&format!( - "{command_name} result: success={}, filesAffected={}, dirsAffected={}, warnings={}, errors={}", - command_result.success, - command_result.files_affected, - command_result.dirs_affected, - command_result.warnings.len(), - command_result.errors.len(), - )); + logger.debug( + "command result", + Some(json!({ + "command": command_name, + "success": command_result.success, + "filesAffected": command_result.files_affected, + "dirsAffected": command_result.dirs_affected, + "warnings": command_result.warnings.len(), + "errors": command_result.errors.len(), + })), + ); } Err(error) => { - logger::error(&format!("{command_name} failed: {error}")); + logger.error(tnmsd::infra::logger::DiagnosticInput { + code: "COMMAND_FAILED".to_string(), + title: format!("{command_name} failed"), + root_cause: vec![error.to_string()], + exact_fix: None, + possible_fixes: None, + details: None, + }); } } } @@ -190,9 +204,10 
@@ fn run_command( ) -> Result, options: tnmsd::MemorySyncCommandOptions, ) -> ExitCode { - log_command_start(command_name); + let logger = create_logger("pipeline", None); + log_command_start(&logger, command_name); let result = operation(options); - log_command_finish(command_name, &result); + log_command_finish(&logger, command_name, &result); let rendered = render_result(result); for line in rendered.stdout_lines { @@ -202,8 +217,7 @@ fn run_command( eprintln!("{line}"); } - logger::flush_output(); - tnmsd::infra::logger::flush_output(); + flush(); if rendered.success { ExitCode::SUCCESS diff --git a/cli/src/lib.rs b/cli/src/lib.rs index 96dbf2d2..7b6e5a0b 100644 --- a/cli/src/lib.rs +++ b/cli/src/lib.rs @@ -1,17 +1,23 @@ mod cli; mod commands; -mod logger; use std::process::ExitCode; use clap::Parser; +use tnmsd::infra::logger::{LogLevel, set_global_level}; pub fn run() -> ExitCode { let args = cli::Cli::parse(); if let Some(level) = cli::resolve_log_level(&args) { - logger::set_global_log_level(level.to_logger_level()); - tnmsd::infra::logger::set_global_log_level(level.to_sdk_logger_level()); + let log_level = match level { + cli::ResolvedLogLevel::Trace => LogLevel::Trace, + cli::ResolvedLogLevel::Debug => LogLevel::Debug, + cli::ResolvedLogLevel::Info => LogLevel::Info, + cli::ResolvedLogLevel::Warn => LogLevel::Warn, + cli::ResolvedLogLevel::Error => LogLevel::Error, + }; + set_global_level(log_level); } match cli::resolve_command(&args) { diff --git a/cli/src/logger.rs b/cli/src/logger.rs deleted file mode 100644 index bc5ef684..00000000 --- a/cli/src/logger.rs +++ /dev/null @@ -1,74 +0,0 @@ -use std::sync::OnceLock; - -static LOGGER: OnceLock = OnceLock::new(); - -#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd)] -pub enum LogLevel { - Trace, - Debug, - Info, - Warn, - Error, -} - -struct Logger { - level: LogLevel, -} - -impl Logger { - fn new(level: LogLevel) -> Self { - Self { level } - } - - fn log(&self, level: LogLevel, message: &str) { - 
if level >= self.level { - eprintln!("[{}] {}", level_to_string(level), message); - } - } -} - -fn level_to_string(level: LogLevel) -> &'static str { - match level { - LogLevel::Trace => "TRACE", - LogLevel::Debug => "DEBUG", - LogLevel::Info => "INFO", - LogLevel::Warn => "WARN", - LogLevel::Error => "ERROR", - } -} - -pub fn set_global_log_level(level: LogLevel) { - let _ = LOGGER.set(Logger::new(level)); -} - -pub fn flush_output() {} - -pub fn trace(message: &str) { - if let Some(logger) = LOGGER.get() { - logger.log(LogLevel::Trace, message); - } -} - -pub fn debug(message: &str) { - if let Some(logger) = LOGGER.get() { - logger.log(LogLevel::Debug, message); - } -} - -pub fn info(message: &str) { - if let Some(logger) = LOGGER.get() { - logger.log(LogLevel::Info, message); - } -} - -pub fn warn(message: &str) { - if let Some(logger) = LOGGER.get() { - logger.log(LogLevel::Warn, message); - } -} - -pub fn error(message: &str) { - if let Some(logger) = LOGGER.get() { - logger.log(LogLevel::Error, message); - } -} diff --git a/mcp/src/main.rs b/mcp/src/main.rs index 924e0d62..017697d5 100644 --- a/mcp/src/main.rs +++ b/mcp/src/main.rs @@ -151,6 +151,9 @@ fn handle_tools_call(params: &Value) -> Value { }; let arguments = params.get("arguments").cloned().unwrap_or(json!({})); + let logger = tnmsd::infra::logger::create_logger("mcp.tools", None); + let _span = logger.span(&format!("tools.{}", name)).enter(); + match name { "list_prompts" => handle_list_prompts(&arguments), "get_prompt" => handle_get_prompt(&arguments), @@ -319,13 +322,30 @@ fn run_stdio_server() { } fn main() -> ExitCode { + // Initialize logger, default Info, override via LOG_LEVEL env var + tnmsd::infra::logger::set_global_level( + std::env::var("LOG_LEVEL") + .ok() + .and_then(|s| tnmsd::infra::logger::LogLevel::from_str_loose(&s)) + .unwrap_or(tnmsd::infra::logger::LogLevel::Info) + ); + let cli = Cli::parse(); + let logger = tnmsd::infra::logger::create_logger("tnmsm", None); match 
resolve_command(&cli) { ResolvedCommand::Serve => { + let _span = logger.span("server.serve").enter(); + logger.info("MCP server started", Some(json!({ + "serverName": SERVER_NAME, + "protocolVersion": PROTOCOL_VERSION, + }))); run_stdio_server(); ExitCode::SUCCESS } - ResolvedCommand::AssembleNpm(args) => commands::package::execute(&args), + ResolvedCommand::AssembleNpm(args) => { + let _span = logger.span("command.assemble_npm").enter(); + commands::package::execute(&args) + } } } diff --git a/sdk/src/context/mod.rs b/sdk/src/context/mod.rs index 6b9ede14..f2a1b6ed 100644 --- a/sdk/src/context/mod.rs +++ b/sdk/src/context/mod.rs @@ -1,3 +1 @@ -pub mod output_context; - -pub use output_context::OutputContext; +pub use crate::domain::output_context::OutputContext; diff --git a/sdk/src/domain/base_output_plans.rs b/sdk/src/domain/base_output_plans.rs index 238fc95a..2e708eef 100644 --- a/sdk/src/domain/base_output_plans.rs +++ b/sdk/src/domain/base_output_plans.rs @@ -3,12 +3,12 @@ use std::path::{Component, Path, PathBuf}; use serde::{Deserialize, Serialize}; use crate::CliError; -use crate::context::OutputContext; +use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; +use crate::domain::output_context::OutputContext; use crate::domain::plugin_shared::{ IDEKind, Project, ProjectIDEConfigFile, RelativePath, Workspace, }; -use crate::policy::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; -use crate::policy::git_discovery::{find_all_git_repos, resolve_git_info_dir}; +use crate::infra::git_fs::{find_all_git_repos, resolve_git_info_dir}; const AGENTS_PLUGIN_NAME: &str = "AgentsOutputAdaptor"; const GIT_EXCLUDE_PLUGIN_NAME: &str = "GitExcludeOutputAdaptor"; diff --git a/sdk/src/domain/cleanup.rs b/sdk/src/domain/cleanup.rs new file mode 100644 index 00000000..322828c0 --- /dev/null +++ b/sdk/src/domain/cleanup.rs @@ -0,0 +1,141 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone, Copy, 
PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum ProtectionModeDto { + Direct, + Recursive, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum ProtectionRuleMatcherDto { + Path, + Glob, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum CleanupTargetKindDto { + File, + Directory, + Glob, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum CleanupErrorKindDto { + File, + Directory, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CleanupTargetDto { + pub path: String, + pub kind: CleanupTargetKindDto, + #[serde(default)] + pub exclude_basenames: Vec, + pub protection_mode: Option, + pub scope: Option, + pub label: Option, +} + +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CleanupDeclarationsDto { + #[serde(default)] + pub delete: Vec, + #[serde(default)] + pub protect: Vec, + #[serde(default)] + pub exclude_scan_globs: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PluginCleanupSnapshotDto { + pub plugin_name: String, + #[serde(default)] + pub outputs: Vec, + #[serde(default)] + pub cleanup: CleanupDeclarationsDto, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ProtectedRuleDto { + pub path: String, + pub protection_mode: ProtectionModeDto, + pub reason: String, + pub source: String, + pub matcher: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CleanupSnapshot { + pub workspace_dir: String, + pub aindex_dir: Option, + #[serde(default)] + pub project_roots: Vec, + #[serde(default)] + pub 
protected_rules: Vec, + #[serde(default)] + pub plugin_snapshots: Vec, + pub empty_dir_exclude_globs: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ProtectedPathViolationDto { + pub target_path: String, + pub protected_path: String, + pub protection_mode: ProtectionModeDto, + pub reason: String, + pub source: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CleanupProtectionConflictDto { + pub output_path: String, + pub output_plugin: String, + pub protected_path: String, + pub protection_mode: ProtectionModeDto, + pub protected_by: String, + pub reason: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CleanupPlan { + pub files_to_delete: Vec, + pub dirs_to_delete: Vec, + pub empty_dirs_to_delete: Vec, + pub violations: Vec, + pub conflicts: Vec, + pub excluded_scan_globs: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CleanupErrorDto { + pub path: String, + pub kind: CleanupErrorKindDto, + pub error: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CleanupExecutionResultDto { + pub deleted_files: usize, + pub deleted_dirs: usize, + pub errors: Vec, + pub violations: Vec, + pub conflicts: Vec, + pub files_to_delete: Vec, + pub dirs_to_delete: Vec, + pub empty_dirs_to_delete: Vec, + pub excluded_scan_globs: Vec, +} diff --git a/sdk/src/domain/config/mod.rs b/sdk/src/domain/config/mod.rs index e0152344..269ee8d8 100644 --- a/sdk/src/domain/config/mod.rs +++ b/sdk/src/domain/config/mod.rs @@ -268,7 +268,9 @@ pub struct RuntimeEnvironmentContext { } fn home_dir() -> Option { - dirs::home_dir() + std::env::var_os("HOME") + .map(PathBuf::from) + .or_else(|| dirs::home_dir()) } fn 
normalize_posix_like_path(raw_path: &str) -> String { @@ -666,7 +668,13 @@ pub fn resolve_workspace_aindex_source_series_dir( } /// Get the global config file path: `~/.aindex/.tnmsc.json` +/// +/// Override via `TNMSC_CONFIG_PATH` environment variable. pub fn get_global_config_path() -> PathBuf { + if let Ok(override_path) = std::env::var("TNMSC_CONFIG_PATH") { + return PathBuf::from(override_path); + } + let runtime_environment = resolve_runtime_environment(); if let Some(selected_path) = runtime_environment.selected_global_config_path { diff --git a/sdk/src/domain/mod.rs b/sdk/src/domain/mod.rs index cc7018c4..6f46f85d 100644 --- a/sdk/src/domain/mod.rs +++ b/sdk/src/domain/mod.rs @@ -1,10 +1,17 @@ pub mod base_output_plans; +pub mod cleanup; pub mod config; +pub mod output_context; pub mod output_plans; pub mod plugin_shared; pub use base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPlansDto}; +pub use cleanup::{ + CleanupDeclarationsDto, CleanupPlan, CleanupSnapshot, CleanupTargetDto, CleanupTargetKindDto, + ProtectionModeDto, +}; pub use config::{ConfigLoader, MergedConfigResult, PluginsConfig, UserConfigFile}; +pub use output_context::OutputContext; pub use plugin_shared::{ AIAgentIgnoreConfigFile, FastCommandPrompt, GlobalMemoryPrompt, IDEKind, NamingCaseKind, PluginKind, Project, ProjectIDEConfigFile, PromptKind, ReadmePrompt, RelativePath, RulePrompt, diff --git a/sdk/src/domain/output_context.rs b/sdk/src/domain/output_context.rs new file mode 100644 index 00000000..7bd235e3 --- /dev/null +++ b/sdk/src/domain/output_context.rs @@ -0,0 +1,53 @@ +use serde::{Deserialize, Serialize}; + +use crate::domain::plugin_shared::{ + AIAgentIgnoreConfigFile, FastCommandPrompt, GlobalMemoryPrompt, ProjectIDEConfigFile, + ReadmePrompt, RulePrompt, SkillPrompt, SubAgentPrompt, Workspace, +}; + +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct OutputContext { + #[serde(default, skip_serializing_if = 
"Option::is_none")] + pub workspace: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub vscode_config_files: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub zed_config_files: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub jetbrains_config_files: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub editor_config_files: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub fast_commands: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub sub_agents: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub skills: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub rules: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub global_memory: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub global_git_ignore: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub shadow_git_exclude: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub shadow_source_project_dir: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub readme_prompts: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub ai_agent_ignore_config_files: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub registered_output_plugins: Option>, +} + +impl OutputContext { + pub fn from_json(json: &str) -> Result { + serde_json::from_str(json) + } + + pub fn to_json(&self) -> Result { + serde_json::to_string(self) + } +} diff --git a/sdk/src/domain/output_plans/claude_code_output_plan.rs b/sdk/src/domain/output_plans/claude_code_output_plan.rs index 41722c81..050dd0f9 100644 --- a/sdk/src/domain/output_plans/claude_code_output_plan.rs +++ b/sdk/src/domain/output_plans/claude_code_output_plan.rs @@ -3,11 +3,11 @@ use std::path::PathBuf; use serde_json::Value; use 
crate::CliError; -use crate::context::OutputContext; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPluginPlanDto}; +use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; use crate::domain::config; +use crate::domain::output_context::OutputContext; use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; -use crate::policy::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; const CLAUDE_CODE_PLUGIN_NAME: &str = "ClaudeCodeCLIOutputAdaptor"; const CLAUDE_CODE_MEMORY_FILE: &str = "CLAUDE.md"; diff --git a/sdk/src/domain/output_plans/codex_output_plan.rs b/sdk/src/domain/output_plans/codex_output_plan.rs index 6fd73388..68eb89a0 100644 --- a/sdk/src/domain/output_plans/codex_output_plan.rs +++ b/sdk/src/domain/output_plans/codex_output_plan.rs @@ -18,11 +18,11 @@ use std::path::PathBuf; use crate::CliError; -use crate::context::OutputContext; +use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPluginPlanDto}; use crate::domain::config; use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; -use crate::policy::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; +use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; const CODEX_PLUGIN_NAME: &str = "CodexCLIOutputAdaptor"; const CODEX_INSTRUCTIONS_FILE: &str = "AGENTS.md"; @@ -254,7 +254,7 @@ fn build_agent_toml_content(agent: &crate::domain::plugin_shared::SubAgentPrompt } fn build_command_content(command: &crate::domain::plugin_shared::FastCommandPrompt) -> String { - let mut metadata = if let Some(ref yaml_fm) = command.yaml_front_matter { + let metadata = if let Some(ref yaml_fm) = command.yaml_front_matter { match serde_json::to_value(yaml_fm) { Ok(serde_json::Value::Object(map)) => map, _ => serde_json::Map::new(), @@ -541,13 +541,6 @@ fn 
get_project_output_projects(workspace: &Workspace) -> Vec<&Project> { projects } -fn get_project_prompt_output_projects(workspace: &Workspace) -> Vec<&Project> { - get_project_output_projects(workspace) - .into_iter() - .filter(|p| p.is_prompt_source_project != Some(true)) - .collect() -} - fn resolve_project_root_dir(workspace: &Workspace, project: &Project) -> Option { if project.is_workspace_root_project == Some(true) { return Some(PathBuf::from(&workspace.directory.path)); diff --git a/sdk/src/domain/output_plans/cursor_output_plan.rs b/sdk/src/domain/output_plans/cursor_output_plan.rs index 79a334f3..8edbfb6e 100644 --- a/sdk/src/domain/output_plans/cursor_output_plan.rs +++ b/sdk/src/domain/output_plans/cursor_output_plan.rs @@ -1,10 +1,10 @@ use std::path::PathBuf; use crate::CliError; -use crate::context::OutputContext; +use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPluginPlanDto}; use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; -use crate::policy::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; +use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; const CURSOR_PLUGIN_NAME: &str = "CursorOutputAdaptor"; const CURSOR_MEMORY_FILE: &str = ".cursorrules"; diff --git a/sdk/src/domain/output_plans/droid_output_plan.rs b/sdk/src/domain/output_plans/droid_output_plan.rs index c91869a2..c0220f13 100644 --- a/sdk/src/domain/output_plans/droid_output_plan.rs +++ b/sdk/src/domain/output_plans/droid_output_plan.rs @@ -5,13 +5,13 @@ use serde::{Deserialize, Serialize}; use serde_json::{Map, Value}; use crate::CliError; -use crate::context::OutputContext; +use crate::domain::output_context::OutputContext; use crate::domain::config; use crate::domain::plugin_shared::{ FastCommandPrompt, Project, RelativePath, RuleScope, SkillPrompt, SkillResourceEncoding, Workspace, }; -use 
crate::policy::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; +use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; const DROID_PLUGIN_NAME: &str = "DroidCLIOutputAdaptor"; const DROID_MEMORY_FILE: &str = "AGENTS.md"; @@ -991,6 +991,7 @@ mod tests { .find(|entry| { entry .path + .replace('\\', "/") .ends_with("project-a/.factory/skills/ship/SKILL.md") }) .unwrap(); @@ -1000,6 +1001,7 @@ mod tests { .find(|entry| { entry .path + .replace('\\', "/") .ends_with("project-a/.factory/skills/ship/assets/blob.bin") }) .unwrap(); diff --git a/sdk/src/domain/output_plans/gemini_output_plan.rs b/sdk/src/domain/output_plans/gemini_output_plan.rs index f970fd36..a3ad9ea0 100644 --- a/sdk/src/domain/output_plans/gemini_output_plan.rs +++ b/sdk/src/domain/output_plans/gemini_output_plan.rs @@ -2,11 +2,11 @@ use std::collections::HashSet; use std::path::PathBuf; use crate::CliError; -use crate::context::OutputContext; +use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPluginPlanDto}; use crate::domain::config; use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; -use crate::policy::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; +use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; const GEMINI_PLUGIN_NAME: &str = "GeminiCLIOutputAdaptor"; const GEMINI_MEMORY_FILE: &str = "GEMINI.md"; diff --git a/sdk/src/domain/output_plans/generic_skills_output_plan.rs b/sdk/src/domain/output_plans/generic_skills_output_plan.rs index ff62d3cd..7210563a 100644 --- a/sdk/src/domain/output_plans/generic_skills_output_plan.rs +++ b/sdk/src/domain/output_plans/generic_skills_output_plan.rs @@ -1,8 +1,8 @@ use crate::CliError; -use crate::context::OutputContext; +use crate::domain::output_context::OutputContext; use 
crate::domain::base_output_plans::BaseOutputPluginPlanDto; use crate::domain::plugin_shared::Workspace; -use crate::policy::cleanup::CleanupDeclarationsDto; +use crate::domain::cleanup::CleanupDeclarationsDto; const GENERIC_SKILLS_PLUGIN_NAME: &str = "GenericSkillsOutputAdaptor"; diff --git a/sdk/src/domain/output_plans/jetbrains_ai_assistant_codex_output_plan.rs b/sdk/src/domain/output_plans/jetbrains_ai_assistant_codex_output_plan.rs index fd13504b..24d97286 100644 --- a/sdk/src/domain/output_plans/jetbrains_ai_assistant_codex_output_plan.rs +++ b/sdk/src/domain/output_plans/jetbrains_ai_assistant_codex_output_plan.rs @@ -1,8 +1,8 @@ use crate::CliError; -use crate::context::OutputContext; use crate::domain::base_output_plans::BaseOutputPluginPlanDto; +use crate::domain::cleanup::CleanupDeclarationsDto; +use crate::domain::output_context::OutputContext; use crate::domain::plugin_shared::Workspace; -use crate::policy::cleanup::CleanupDeclarationsDto; const JB_PLUGIN_NAME: &str = "JetBrainsAIAssistantCodexOutputAdaptor"; diff --git a/sdk/src/domain/output_plans/kiro_output_plan.rs b/sdk/src/domain/output_plans/kiro_output_plan.rs index 4cd62e11..472b5095 100644 --- a/sdk/src/domain/output_plans/kiro_output_plan.rs +++ b/sdk/src/domain/output_plans/kiro_output_plan.rs @@ -1,10 +1,10 @@ use std::path::PathBuf; use crate::CliError; -use crate::context::OutputContext; +use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::BaseOutputPluginPlanDto; use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; -use crate::policy::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; +use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; const KIRO_PLUGIN_NAME: &str = "KiroCLIOutputAdaptor"; const PROJECT_SCOPE: &str = "project"; diff --git a/sdk/src/domain/output_plans/opencode_output_plan.rs b/sdk/src/domain/output_plans/opencode_output_plan.rs index 07ccb0ce..6bd98f61 
100644 --- a/sdk/src/domain/output_plans/opencode_output_plan.rs +++ b/sdk/src/domain/output_plans/opencode_output_plan.rs @@ -3,11 +3,11 @@ use std::path::PathBuf; use serde_json::Value; use crate::CliError; -use crate::context::OutputContext; +use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPluginPlanDto}; use crate::domain::config; use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; -use crate::policy::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; +use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; const OPENCODE_PLUGIN_NAME: &str = "OpencodeCLIOutputAdaptor"; const OPENCODE_MEMORY_FILE: &str = "AGENTS.md"; diff --git a/sdk/src/domain/output_plans/qoder_output_plan.rs b/sdk/src/domain/output_plans/qoder_output_plan.rs index ede69984..81fdb322 100644 --- a/sdk/src/domain/output_plans/qoder_output_plan.rs +++ b/sdk/src/domain/output_plans/qoder_output_plan.rs @@ -1,8 +1,8 @@ use crate::CliError; -use crate::context::OutputContext; +use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::BaseOutputPluginPlanDto; use crate::domain::plugin_shared::Workspace; -use crate::policy::cleanup::CleanupDeclarationsDto; +use crate::domain::cleanup::CleanupDeclarationsDto; const QODER_PLUGIN_NAME: &str = "QoderIDEPluginOutputAdaptor"; diff --git a/sdk/src/domain/output_plans/trae_output_plan.rs b/sdk/src/domain/output_plans/trae_output_plan.rs index 68948637..6c4c9f42 100644 --- a/sdk/src/domain/output_plans/trae_output_plan.rs +++ b/sdk/src/domain/output_plans/trae_output_plan.rs @@ -1,10 +1,10 @@ use std::path::PathBuf; use crate::CliError; -use crate::context::OutputContext; +use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPluginPlanDto}; use crate::domain::plugin_shared::{Project, RelativePath, 
Workspace}; -use crate::policy::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; +use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; const TRAE_PLUGIN_NAME: &str = "TraeOutputAdaptor"; const TRAE_STEERING_FILE: &str = "GLOBAL.md"; diff --git a/sdk/src/domain/output_plans/warp_output_plan.rs b/sdk/src/domain/output_plans/warp_output_plan.rs index 82e687c5..5a604ae9 100644 --- a/sdk/src/domain/output_plans/warp_output_plan.rs +++ b/sdk/src/domain/output_plans/warp_output_plan.rs @@ -1,10 +1,10 @@ use std::path::PathBuf; use crate::CliError; -use crate::context::OutputContext; +use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPluginPlanDto}; use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; -use crate::policy::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; +use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; const WARP_PLUGIN_NAME: &str = "WarpIDEOutputAdaptor"; const WARP_MEMORY_FILE: &str = "WARP.md"; diff --git a/sdk/src/domain/output_plans/windsurf_output_plan.rs b/sdk/src/domain/output_plans/windsurf_output_plan.rs index 8350b7d9..c97b302c 100644 --- a/sdk/src/domain/output_plans/windsurf_output_plan.rs +++ b/sdk/src/domain/output_plans/windsurf_output_plan.rs @@ -1,10 +1,10 @@ use std::path::PathBuf; use crate::CliError; -use crate::context::OutputContext; +use crate::domain::output_context::OutputContext; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPluginPlanDto}; use crate::domain::plugin_shared::{Project, RelativePath, Workspace}; -use crate::policy::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; +use crate::domain::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; const WINDSURF_PLUGIN_NAME: &str = "WindsurfOutputAdaptor"; const 
WINDSURF_MEMORY_FILE: &str = ".windsurfrules"; diff --git a/sdk/src/infra/git_fs.rs b/sdk/src/infra/git_fs.rs new file mode 100644 index 00000000..9197d21b --- /dev/null +++ b/sdk/src/infra/git_fs.rs @@ -0,0 +1,196 @@ +use std::fs; +use std::path::{Path, PathBuf}; + +pub fn resolve_git_info_dir(project_dir: &Path) -> Option { + let dot_git = project_dir.join(".git"); + if !dot_git.exists() { + return None; + } + + let metadata = fs::symlink_metadata(&dot_git).ok()?; + if metadata.is_dir() { + return Some(dot_git.join("info")); + } + + if metadata.is_file() { + let content = fs::read_to_string(&dot_git).ok()?; + for line in content.lines() { + let line = line.trim(); + if let Some(gitdir) = line.strip_prefix("gitdir:") { + let gitdir = Path::new(gitdir.trim()); + let resolved = if gitdir.is_absolute() { + gitdir.to_path_buf() + } else { + project_dir.join(gitdir) + }; + return Some(resolved.join("info")); + } + } + } + + None +} + +const SKIP_DIRS: &[&str] = &["node_modules", ".turbo", "dist", "build", "out", ".cache"]; + +pub fn find_all_git_repos(root_dir: &Path, max_depth: usize) -> Vec { + let mut results = Vec::new(); + + fn walk(dir: &Path, root_dir: &Path, depth: usize, max_depth: usize, results: &mut Vec) { + if depth > max_depth { + return; + } + + let entries = match fs::read_dir(dir) { + Ok(e) => e, + Err(_) => return, + }; + + let mut has_git = false; + let mut subdirs = Vec::new(); + + for entry in entries.flatten() { + let name = entry.file_name(); + let name_str = name.to_string_lossy(); + if name_str == ".git" { + has_git = true; + continue; + } + if let Ok(ft) = entry.file_type() + && ft.is_dir() + && !SKIP_DIRS.contains(&name_str.as_ref()) + { + subdirs.push(entry.path()); + } + } + + if has_git && dir != root_dir { + results.push(dir.to_path_buf()); + } + + for subdir in subdirs { + walk(&subdir, root_dir, depth + 1, max_depth, results); + } + } + + walk(root_dir, root_dir, 0, max_depth, &mut results); + results +} + +#[cfg(test)] +mod tests { + 
use super::*; + use std::fs; + use tempfile::TempDir; + + #[test] + fn test_resolve_git_info_dir_for_regular_repo() { + let tmp = TempDir::new().unwrap(); + let dot_git = tmp.path().join(".git"); + fs::create_dir_all(&dot_git).unwrap(); + + let result = resolve_git_info_dir(tmp.path()); + assert_eq!(result, Some(dot_git.join("info"))); + } + + #[test] + fn test_resolve_git_info_dir_for_gitlink() { + let tmp = TempDir::new().unwrap(); + let dot_git = tmp.path().join(".git"); + fs::write(&dot_git, "gitdir: /absolute/path/to/git\n").unwrap(); + + let result = resolve_git_info_dir(tmp.path()); + assert!(result.is_some()); + let result_str = result.as_ref().unwrap().to_string_lossy().replace('\\', "/"); + // On Windows, absolute paths starting with / get a drive letter prefix + let result_normalized = result_str + .strip_prefix("C:") + .or_else(|| result_str.strip_prefix("c:")) + .unwrap_or(&result_str); + assert_eq!(result_normalized, "/absolute/path/to/git/info"); + } + + #[test] + fn test_resolve_git_info_dir_for_relative_gitlink() { + let tmp = TempDir::new().unwrap(); + let dot_git = tmp.path().join(".git"); + fs::write(&dot_git, "gitdir: ../.git/modules/foo\n").unwrap(); + + let result = resolve_git_info_dir(tmp.path()); + assert_eq!( + result, + Some( + tmp + .path() + .join("..") + .join(".git") + .join("modules") + .join("foo") + .join("info") + .canonicalize() + .unwrap_or_else(|_| tmp + .path() + .join("..") + .join(".git") + .join("modules") + .join("foo") + .join("info")) + ) + ); + } + + #[test] + fn test_resolve_git_info_dir_missing() { + let tmp = TempDir::new().unwrap(); + assert_eq!(resolve_git_info_dir(tmp.path()), None); + } + + #[test] + fn test_find_all_git_repos_finds_nested() { + let tmp = TempDir::new().unwrap(); + let root = tmp.path(); + let child = root.join("packages").join("app"); + fs::create_dir_all(root.join(".git")).unwrap(); + fs::create_dir_all(child.join(".git")).unwrap(); + + let result = find_all_git_repos(root, 5); + 
assert_eq!(result.len(), 1); + assert_eq!(result[0], child); + } + + #[test] + fn test_find_all_git_repos_excludes_root() { + let tmp = TempDir::new().unwrap(); + let root = tmp.path(); + fs::create_dir_all(root.join(".git")).unwrap(); + + let result = find_all_git_repos(root, 5); + assert!(result.is_empty()); + } + + #[test] + fn test_find_all_git_repos_skips_skip_dirs() { + let tmp = TempDir::new().unwrap(); + let root = tmp.path(); + let node_modules = root.join("node_modules").join("some-lib"); + fs::create_dir_all(node_modules.join(".git")).unwrap(); + + let result = find_all_git_repos(root, 5); + assert!(result.is_empty()); + } + + #[test] + fn test_find_all_git_repos_respects_max_depth() { + let tmp = TempDir::new().unwrap(); + let root = tmp.path(); + let deep = root.join("a").join("b").join("c").join("d"); + fs::create_dir_all(deep.join(".git")).unwrap(); + + let result = find_all_git_repos(root, 3); + assert!(result.is_empty()); + + let result = find_all_git_repos(root, 4); + assert_eq!(result.len(), 1); + assert_eq!(result[0], deep); + } +} diff --git a/sdk/src/infra/logger.rs b/sdk/src/infra/logger.rs deleted file mode 100644 index 5a2bcd44..00000000 --- a/sdk/src/infra/logger.rs +++ /dev/null @@ -1,1175 +0,0 @@ -#![deny(clippy::all)] - -//! AI-friendly Markdown logger with minimal terminal noise. -//! -//! Output format: -//! - Messages: `### Title` with optional Markdown bullet metadata -//! 
- Diagnostics: `### Title` followed by concise action-focused sections - -use serde::{Deserialize, Serialize}; -use serde_json::{Map, Value}; -use std::io::{BufWriter, Write}; -use std::sync::atomic::{AtomicU8, Ordering}; -use std::sync::mpsc::{self, Receiver, Sender}; -use std::sync::{LazyLock, Mutex}; -use std::thread; - -// --------------------------------------------------------------------------- -// Log levels -// --------------------------------------------------------------------------- - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize)] -#[serde(rename_all = "lowercase")] -pub enum LogLevel { - Silent, - Fatal, - Error, - Warn, - Info, - Debug, - Trace, -} - -impl LogLevel { - fn priority(self) -> u8 { - match self { - Self::Silent => 0, - Self::Fatal => 1, - Self::Error => 2, - Self::Warn => 3, - Self::Info => 4, - Self::Debug => 5, - Self::Trace => 6, - } - } - - fn as_str(self) -> &'static str { - match self { - Self::Silent => "silent", - Self::Fatal => "fatal", - Self::Error => "error", - Self::Warn => "warn", - Self::Info => "info", - Self::Debug => "debug", - Self::Trace => "trace", - } - } - - pub fn from_str_loose(s: &str) -> Option { - match s.to_ascii_lowercase().as_str() { - "silent" => Some(Self::Silent), - "fatal" => Some(Self::Fatal), - "error" => Some(Self::Error), - "warn" => Some(Self::Warn), - "info" => Some(Self::Info), - "debug" => Some(Self::Debug), - "trace" => Some(Self::Trace), - _ => None, - } - } -} - -// --------------------------------------------------------------------------- -// LogRecord (the structured return value) -// --------------------------------------------------------------------------- - -#[derive(Debug, Clone, Serialize)] -pub struct LogRecord { - #[serde(rename = "$")] - pub meta: (String, String, String), - #[serde(rename = "_")] - pub payload: Value, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct LoggerDiagnosticInput { - pub code: String, - 
pub title: String, - pub root_cause: Vec, - #[serde(skip_serializing_if = "Option::is_none")] - pub exact_fix: Option>, - #[serde(skip_serializing_if = "Option::is_none")] - pub possible_fixes: Option>>, - #[serde(skip_serializing_if = "Option::is_none")] - pub details: Option>, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct LoggerDiagnosticRecord { - pub code: String, - pub title: String, - pub root_cause: Vec, - #[serde(skip_serializing_if = "Option::is_none")] - pub exact_fix: Option>, - #[serde(skip_serializing_if = "Option::is_none")] - pub possible_fixes: Option>>, - #[serde(skip_serializing_if = "Option::is_none")] - pub details: Option>, - pub level: String, - pub namespace: String, - pub copy_text: Vec, -} - -// --------------------------------------------------------------------------- -// Global log level -// --------------------------------------------------------------------------- - -static GLOBAL_LOG_LEVEL: AtomicU8 = AtomicU8::new(255); // 255 = unset -static BUFFERED_DIAGNOSTICS: LazyLock>> = - LazyLock::new(|| Mutex::new(Vec::new())); -static OUTPUT_SINK: LazyLock> = LazyLock::new(spawn_output_sink); - -enum OutputCommand { - Write { use_stderr: bool, output: String }, - Flush { ack: Sender<()> }, -} - -/// Set the global log level for all loggers. -pub fn set_global_log_level(level: LogLevel) { - GLOBAL_LOG_LEVEL.store(level.priority(), Ordering::Relaxed); -} - -/// Get the current global log level. 
-pub fn get_global_log_level() -> Option { - let v = GLOBAL_LOG_LEVEL.load(Ordering::Relaxed); - if v == 255 { None } else { priority_to_level(v) } -} - -pub fn clear_buffered_diagnostics() { - if let Ok(mut buffered) = BUFFERED_DIAGNOSTICS.lock() { - buffered.clear(); - } -} - -pub fn drain_buffered_diagnostics() -> Vec { - match BUFFERED_DIAGNOSTICS.lock() { - Ok(mut buffered) => std::mem::take(&mut *buffered), - Err(_) => Vec::new(), - } -} - -pub fn flush_output() { - let (ack_tx, ack_rx) = mpsc::channel(); - if OUTPUT_SINK - .send(OutputCommand::Flush { ack: ack_tx }) - .is_ok() - { - let _ = ack_rx.recv(); - } -} - -fn priority_to_level(p: u8) -> Option { - match p { - 0 => Some(LogLevel::Silent), - 1 => Some(LogLevel::Fatal), - 2 => Some(LogLevel::Error), - 3 => Some(LogLevel::Warn), - 4 => Some(LogLevel::Info), - 5 => Some(LogLevel::Debug), - 6 => Some(LogLevel::Trace), - _ => None, - } -} - -fn resolve_log_level(explicit: Option) -> LogLevel { - if let Some(l) = explicit { - return l; - } - if let Some(l) = get_global_log_level() { - return l; - } - if let Ok(env_val) = std::env::var("LOG_LEVEL") - && let Some(l) = LogLevel::from_str_loose(&env_val) - { - return l; - } - LogLevel::Info -} - -// --------------------------------------------------------------------------- -// JSON formatting -// --------------------------------------------------------------------------- - -fn indent(level: usize) -> String { - " ".repeat(level) -} - -fn to_plain_json(value: &Value) -> String { - serde_json::to_string(value) - .unwrap_or_else(|_| r#"{"error":"failed to serialize output"}"#.to_string()) -} - -// --------------------------------------------------------------------------- -// Diagnostics -// --------------------------------------------------------------------------- - -fn validate_non_empty_lines(field_name: &str, lines: &[String], errors: &mut Vec) { - if lines.is_empty() { - errors.push(format!("{field_name} must contain at least one line")); - } -} - -fn 
validate_diagnostic_input(input: &LoggerDiagnosticInput) -> Result<(), Vec> { - let mut errors: Vec = Vec::new(); - - if input.code.trim().is_empty() { - errors.push("code must be a non-empty string".to_string()); - } - if input.title.trim().is_empty() { - errors.push("title must be a non-empty string".to_string()); - } - validate_non_empty_lines("rootCause", &input.root_cause, &mut errors); - - if let Some(lines) = &input.exact_fix { - validate_non_empty_lines("exactFix", lines, &mut errors); - } - - if let Some(fixes) = &input.possible_fixes { - if fixes.is_empty() { - errors.push("possibleFixes must contain at least one fix when provided".to_string()); - } - for (index, lines) in fixes.iter().enumerate() { - if lines.is_empty() { - errors.push(format!( - "possibleFixes[{index}] must contain at least one line" - )); - } - } - } - - if errors.is_empty() { - Ok(()) - } else { - Err(errors) - } -} - -fn build_payload(message: &Value, meta: Option<&Value>) -> Value { - let Some(meta_val) = meta else { - return message.clone(); - }; - - if meta_val.as_object().is_some_and(|object| object.is_empty()) { - return message.clone(); - } - - let message_str = match message { - Value::String(s) => s.as_str(), - _ => "", - }; - - if message_str.is_empty() { - return meta_val.clone(); - } - - if meta_val.is_object() { - let mut map = Map::new(); - map.insert(message_str.to_string(), meta_val.clone()); - return Value::Object(map); - } - - let mut map = Map::new(); - map.insert( - "message".to_string(), - Value::String(message_str.to_string()), - ); - map.insert("meta".to_string(), meta_val.clone()); - Value::Object(map) -} - -fn append_section( - lines: &mut Vec, - title: &str, - entries: &[String], - numbered: Option, -) { - if entries.is_empty() { - return; - } - - if !lines.is_empty() { - lines.push(String::new()); - } - - if !title.is_empty() { - lines.push(title.to_string()); - } - - match numbered { - Some(number) => { - let mut iter = entries.iter(); - if let Some(first) 
= iter.next() { - lines.push(format!(" {number}. {first}")); - } - for entry in iter { - lines.push(format!(" {entry}")); - } - } - None => { - for entry in entries { - lines.push(format!(" - {entry}")); - } - } - } -} - -fn scalar_to_markdown_text(value: &Value) -> String { - match value { - Value::Null => "null".to_string(), - Value::Bool(boolean) => boolean.to_string(), - Value::Number(number) => number.to_string(), - Value::String(text) => text.clone(), - Value::Array(_) | Value::Object(_) => to_plain_json(value), - } -} - -fn append_markdown_value( - lines: &mut Vec, - label: Option<&str>, - value: &Value, - depth: usize, -) { - let prefix = indent(depth); - let bullet = format!("{prefix}- "); - - match value { - Value::Null | Value::Bool(_) | Value::Number(_) | Value::String(_) => match label { - Some(name) => { - lines.push(format!( - "{bullet}{name}: {}", - scalar_to_markdown_text(value) - )); - } - None => { - lines.push(format!("{bullet}{}", scalar_to_markdown_text(value))); - } - }, - Value::Array(items) => { - if items.is_empty() { - match label { - Some(name) => { - lines.push(format!("{bullet}{name}: []")); - } - None => { - lines.push(format!("{bullet}[]")); - } - } - return; - } - - if let Some(name) = label { - lines.push(format!("{bullet}{name}:")); - for item in items { - append_markdown_value(lines, None, item, depth + 1); - } - return; - } - - for item in items { - append_markdown_value(lines, None, item, depth); - } - } - Value::Object(map) => { - if map.is_empty() { - match label { - Some(name) => { - lines.push(format!("{bullet}{name}: {{}}")); - } - None => { - lines.push(format!("{bullet}{{}}")); - } - } - return; - } - - if let Some(name) = label { - lines.push(format!("{bullet}{name}:")); - for (key, nested) in map { - append_markdown_value(lines, Some(key), nested, depth + 1); - } - return; - } - - for (key, nested) in map { - append_markdown_value(lines, Some(key), nested, depth); - } - } - } -} - -fn value_to_markdown_lines(value: 
&Value) -> Vec { - let mut lines = Vec::new(); - append_markdown_value(&mut lines, None, value, 0); - lines -} - -fn extract_message_and_meta_lines(payload: &Value) -> (Option, Vec) { - match payload { - Value::String(text) => (Some(text.clone()), Vec::new()), - Value::Object(map) => { - if let Some(Value::String(message)) = map.get("message") { - let mut remainder = map.clone(); - remainder.remove("message"); - let lines = if remainder.is_empty() { - Vec::new() - } else { - value_to_markdown_lines(&Value::Object(remainder)) - }; - return (Some(message.clone()), lines); - } - - if map.len() == 1 - && let Some((message, nested)) = map.iter().next() - { - match nested { - Value::Null | Value::Bool(_) | Value::Number(_) | Value::String(_) => { - return ( - Some(format!("{message}: {}", scalar_to_markdown_text(nested))), - Vec::new(), - ); - } - Value::Array(items) if !items.is_empty() => { - return (Some(message.clone()), value_to_markdown_lines(nested)); - } - Value::Object(object) if !object.is_empty() => { - return (Some(message.clone()), value_to_markdown_lines(nested)); - } - _ => {} - } - } - - (None, value_to_markdown_lines(payload)) - } - _ => (None, value_to_markdown_lines(payload)), - } -} - -fn split_preserved_lines(text: &str) -> Vec { - text - .split('\n') - .map(|line| line.trim_end_matches('\r').to_string()) - .collect() -} - -fn render_markdown_heading(title: &str) -> String { - format!("### {title}") -} - -fn split_message_title(message: &str) -> (String, Vec) { - let mut lines = split_preserved_lines(message).into_iter(); - let title = lines - .find(|line| !line.trim().is_empty()) - .unwrap_or_else(|| "Details".to_string()); - let body = lines.collect(); - (title, body) -} - -fn render_message_output(_level: LogLevel, _namespace: &str, payload: &Value) -> String { - let (message, meta_lines) = extract_message_and_meta_lines(payload); - let mut lines = Vec::new(); - - match message { - Some(message) if message.contains('\n') => { - let (title, 
body_lines) = split_message_title(&message); - lines.push(render_markdown_heading(&title)); - if !body_lines.is_empty() { - lines.push(String::new()); - lines.extend(body_lines); - } - } - Some(message) => lines.push(render_markdown_heading(&message)), - None => { - lines.push(render_markdown_heading("Details")); - } - } - - if !meta_lines.is_empty() { - lines.push(String::new()); - lines.extend(meta_lines); - } - - lines.join("\n") -} - -fn render_diagnostic_output(_level: LogLevel, record: &LoggerDiagnosticRecord) -> String { - let mut lines = vec![render_markdown_heading(&record.title)]; - - if !record.root_cause.is_empty() { - append_section(&mut lines, "**What happened**", &record.root_cause, None); - } - - if let Some(exact_fix) = &record.exact_fix { - append_section(&mut lines, "**Do this**", exact_fix, None); - } - - if let Some(possible_fixes) = &record.possible_fixes - && !possible_fixes.is_empty() - { - if !lines.is_empty() { - lines.push(String::new()); - } - lines.push("**Try this if needed**".to_string()); - for (index, fix) in possible_fixes.iter().enumerate() { - let mut iter = fix.iter(); - if let Some(first) = iter.next() { - lines.push(format!(" {}. 
{}", index + 1, first)); - } - for entry in iter { - lines.push(format!(" {entry}")); - } - } - } - - if let Some(details) = &record.details - && !details.is_empty() - { - if !lines.is_empty() { - lines.push(String::new()); - } - lines.push("**Context**".to_string()); - let mut detail_lines = value_to_markdown_lines(&Value::Object(details.clone())); - for line in &mut detail_lines { - line.insert_str(0, " "); - } - lines.extend(detail_lines); - } - - lines.join("\n") -} - -fn build_copy_text(record: &LoggerDiagnosticRecord) -> Vec { - let mut lines = vec![record.title.clone()]; - - append_section(&mut lines, "**What happened**", &record.root_cause, None); - - if let Some(exact_fix) = &record.exact_fix { - append_section(&mut lines, "**Do this**", exact_fix, None); - } - - if let Some(possible_fixes) = &record.possible_fixes - && !possible_fixes.is_empty() - { - if !lines.is_empty() { - lines.push(String::new()); - } - lines.push("**Try this if needed**".to_string()); - for (index, fix) in possible_fixes.iter().enumerate() { - let mut iter = fix.iter(); - if let Some(first) = iter.next() { - lines.push(format!(" {}. 
{}", index + 1, first)); - } - for entry in iter { - lines.push(format!(" {entry}")); - } - } - } - - if let Some(details) = &record.details - && !details.is_empty() - { - if !lines.is_empty() { - lines.push(String::new()); - } - lines.push("**Context**".to_string()); - let mut detail_lines = value_to_markdown_lines(&Value::Object(details.clone())); - for line in &mut detail_lines { - line.insert_str(0, " "); - } - lines.extend(detail_lines); - } - - lines -} - -fn diagnostic_record_from_input( - namespace: &str, - level: LogLevel, - input: LoggerDiagnosticInput, -) -> LoggerDiagnosticRecord { - let mut record = LoggerDiagnosticRecord { - code: input.code.trim().to_string(), - title: input.title.trim().to_string(), - root_cause: input.root_cause, - exact_fix: input.exact_fix, - possible_fixes: input.possible_fixes, - details: input.details, - level: level.as_str().to_string(), - namespace: namespace.to_string(), - copy_text: Vec::new(), - }; - record.copy_text = build_copy_text(&record); - record -} - -fn invalid_diagnostic_record( - namespace: &str, - level: LogLevel, - raw_payload: Value, - validation_errors: &[String], -) -> LoggerDiagnosticRecord { - let mut details = Map::new(); - details.insert("rawPayload".to_string(), raw_payload); - details.insert( - "validationErrors".to_string(), - Value::Array( - validation_errors - .iter() - .map(|entry| Value::String(entry.clone())) - .collect(), - ), - ); - - let mut record = LoggerDiagnosticRecord { - code: "LOGGER_DIAGNOSTIC_SCHEMA_INVALID".to_string(), - title: "Logger diagnostic payload is invalid".to_string(), - root_cause: vec![ - "The logger received a warn/error/fatal payload that does not match the required diagnostic schema.".to_string(), - format!("Validation issues: {}", validation_errors.join("; ")), - ], - exact_fix: Some(vec![ - "Pass a diagnostic object with non-empty code, title, and rootCause fields.".to_string(), - "Keep exactFix and each possibleFixes entry as non-empty string arrays when they are 
present.".to_string(), - ]), - possible_fixes: None, - details: Some(details), - level: level.as_str().to_string(), - namespace: namespace.to_string(), - copy_text: Vec::new(), - }; - record.copy_text = build_copy_text(&record); - record -} - -fn parse_diagnostic_input( - namespace: &str, - level: LogLevel, - diagnostic: Value, -) -> LoggerDiagnosticRecord { - let parsed = serde_json::from_value::(diagnostic.clone()); - match parsed { - Ok(input) => match validate_diagnostic_input(&input) { - Ok(()) => diagnostic_record_from_input(namespace, level, input), - Err(validation_errors) => { - invalid_diagnostic_record(namespace, level, diagnostic, &validation_errors) - } - }, - Err(error) => invalid_diagnostic_record( - namespace, - level, - diagnostic, - &[format!("Diagnostic payload could not be parsed: {error}")], - ), - } -} - -fn serialize_payload(value: impl Serialize) -> Value { - serde_json::to_value(value).unwrap_or_else(|error| { - Value::Object(Map::from_iter([ - ( - "code".to_string(), - Value::String("LOGGER_SERIALIZATION_FAILED".to_string()), - ), - ( - "title".to_string(), - Value::String("Logger payload serialization failed".to_string()), - ), - ("error".to_string(), Value::String(error.to_string())), - ])) - }) -} - -fn push_buffered_diagnostic(record: &LoggerDiagnosticRecord) { - if let Ok(mut buffered) = BUFFERED_DIAGNOSTICS.lock() { - buffered.push(record.clone()); - } -} - -fn writes_to_stderr(level: LogLevel) -> bool { - matches!(level, LogLevel::Error | LogLevel::Fatal | LogLevel::Warn) -} - -// --------------------------------------------------------------------------- -// Format and print -// --------------------------------------------------------------------------- - -fn spawn_output_sink() -> Sender { - let (tx, rx) = mpsc::channel(); - thread::Builder::new() - .name("tnmsd-logger-output".to_string()) - .spawn(move || output_worker(rx)) - .expect("failed to spawn tnmsd logger output worker"); - tx -} - -fn output_worker(receiver: Receiver) { 
- let stdout = std::io::stdout(); - let stderr = std::io::stderr(); - let mut stdout_writer = BufWriter::new(stdout); - let mut stderr_writer = BufWriter::new(stderr); - - while let Ok(command) = receiver.recv() { - match command { - OutputCommand::Write { use_stderr, output } => { - if use_stderr { - let _ = write_output_line(&mut stderr_writer, &output); - } else { - let _ = write_output_line(&mut stdout_writer, &output); - } - } - OutputCommand::Flush { ack } => { - let _ = stdout_writer.flush(); - let _ = stderr_writer.flush(); - let _ = ack.send(()); - } - } - } - - let _ = stdout_writer.flush(); - let _ = stderr_writer.flush(); -} - -fn write_output_line(writer: &mut impl Write, output: &str) -> std::io::Result<()> { - writer.write_all(output.as_bytes())?; - writer.write_all(b"\n")?; - writer.flush() -} - -fn print_output_direct(use_stderr: bool, output: &str) { - if use_stderr { - let mut stderr = std::io::stderr().lock(); - let _ = writeln!(stderr, "{output}"); - let _ = stderr.flush(); - } else { - let mut stdout = std::io::stdout().lock(); - let _ = writeln!(stdout, "{output}"); - let _ = stdout.flush(); - } -} - -fn print_output(level: LogLevel, output: &str) { - let use_stderr = writes_to_stderr(level); - if OUTPUT_SINK - .send(OutputCommand::Write { - use_stderr, - output: output.to_string(), - }) - .is_err() - { - print_output_direct(use_stderr, output); - } -} - -fn emit_message_log_record(level: LogLevel, namespace: &str, payload: Value) -> LogRecord { - let record = LogRecord { - meta: ( - String::new(), - level.as_str().to_string(), - namespace.to_string(), - ), - payload: payload.clone(), - }; - print_output(level, &render_message_output(level, namespace, &payload)); - record -} - -fn emit_diagnostic_log_record(level: LogLevel, record: &LoggerDiagnosticRecord) -> LogRecord { - let payload = serialize_payload(record); - let emitted = LogRecord { - meta: ( - String::new(), - level.as_str().to_string(), - record.namespace.clone(), - ), - payload, - 
}; - print_output(level, &render_diagnostic_output(level, record)); - emitted -} - -// --------------------------------------------------------------------------- -// Logger -// --------------------------------------------------------------------------- - -pub struct Logger { - namespace: String, - level: LogLevel, -} - -impl Logger { - pub fn error(&self, diagnostic: LoggerDiagnosticInput) -> Option { - self.log_diagnostic(LogLevel::Error, serialize_payload(diagnostic)) - } - - pub fn warn(&self, diagnostic: LoggerDiagnosticInput) -> Option { - self.log_diagnostic(LogLevel::Warn, serialize_payload(diagnostic)) - } - - pub fn info(&self, message: impl Into, meta: Option) -> Option { - self.log_message(LogLevel::Info, message.into(), meta) - } - - pub fn debug(&self, message: impl Into, meta: Option) -> Option { - self.log_message(LogLevel::Debug, message.into(), meta) - } - - pub fn trace(&self, message: impl Into, meta: Option) -> Option { - self.log_message(LogLevel::Trace, message.into(), meta) - } - - pub fn fatal(&self, diagnostic: LoggerDiagnosticInput) -> Option { - self.log_diagnostic(LogLevel::Fatal, serialize_payload(diagnostic)) - } - - fn should_emit(&self, level: LogLevel) -> bool { - level.priority() <= self.level.priority() - } - - fn should_buffer_diagnostic(&self, level: LogLevel) -> bool { - self.should_emit(level) || self.level == LogLevel::Silent - } - - fn log_message(&self, level: LogLevel, message: Value, meta: Option) -> Option { - if level.priority() > self.level.priority() { - return None; - } - let payload = build_payload(&message, meta.as_ref()); - Some(emit_message_log_record(level, &self.namespace, payload)) - } - - fn log_diagnostic(&self, level: LogLevel, diagnostic: Value) -> Option { - let record = parse_diagnostic_input(&self.namespace, level, diagnostic); - - if self.should_buffer_diagnostic(level) { - push_buffered_diagnostic(&record); - } - - if !self.should_emit(level) { - return None; - } - - 
Some(emit_diagnostic_log_record(level, &record)) - } -} - -/// Create a new logger with the given namespace and optional log level. -pub fn create_logger(namespace: &str, log_level: Option) -> Logger { - Logger { - namespace: namespace.to_string(), - level: resolve_log_level(log_level), - } -} - -// --------------------------------------------------------------------------- -// Convenience macros -// --------------------------------------------------------------------------- - -#[macro_export] -macro_rules! log_info { - ($logger:expr, $msg:expr) => { - $logger.info(serde_json::Value::String($msg.to_string()), None) - }; - ($logger:expr, $msg:expr, $meta:expr) => { - $logger.info(serde_json::Value::String($msg.to_string()), Some($meta)) - }; -} - -#[macro_export] -macro_rules! log_error { - ($logger:expr, $diagnostic:expr) => { - $logger.error($diagnostic) - }; -} - -#[macro_export] -macro_rules! log_warn { - ($logger:expr, $diagnostic:expr) => { - $logger.warn($diagnostic) - }; -} - -#[macro_export] -macro_rules! log_debug { - ($logger:expr, $msg:expr) => { - $logger.debug(serde_json::Value::String($msg.to_string()), None) - }; - ($logger:expr, $msg:expr, $meta:expr) => { - $logger.debug(serde_json::Value::String($msg.to_string()), Some($meta)) - }; -} - -#[macro_export] -macro_rules! 
log_trace { - ($logger:expr, $msg:expr) => { - $logger.trace(serde_json::Value::String($msg.to_string()), None) - }; - ($logger:expr, $msg:expr, $meta:expr) => { - $logger.trace(serde_json::Value::String($msg.to_string()), Some($meta)) - }; -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_log_level_priority() { - assert!(LogLevel::Silent.priority() < LogLevel::Fatal.priority()); - assert!(LogLevel::Fatal.priority() < LogLevel::Error.priority()); - assert!(LogLevel::Error.priority() < LogLevel::Warn.priority()); - assert!(LogLevel::Warn.priority() < LogLevel::Info.priority()); - assert!(LogLevel::Info.priority() < LogLevel::Debug.priority()); - assert!(LogLevel::Debug.priority() < LogLevel::Trace.priority()); - } - - #[test] - fn test_log_level_from_str() { - assert_eq!(LogLevel::from_str_loose("info"), Some(LogLevel::Info)); - assert_eq!(LogLevel::from_str_loose("INFO"), Some(LogLevel::Info)); - assert_eq!(LogLevel::from_str_loose("Debug"), Some(LogLevel::Debug)); - assert_eq!(LogLevel::from_str_loose("unknown"), None); - } - - #[test] - fn test_create_logger_default_level() { - let logger = create_logger("test", None); - assert_eq!(logger.level, LogLevel::Info); - } - - #[test] - fn test_logger_filters_by_level() { - let logger = create_logger("test", Some(LogLevel::Warn)); - assert!( - logger - .log_message(LogLevel::Info, Value::String("hi".into()), None) - .is_none() - ); - assert!( - logger - .log_message(LogLevel::Error, Value::String("err".into()), None) - .is_some() - ); - } - - #[test] - fn test_build_payload_uses_meta_when_message_is_empty() { - let payload = build_payload( - &Value::String(String::new()), - Some(&serde_json::json!([1, 2, 3])), - ); - assert_eq!(payload, serde_json::json!([1, 2, 3])); - } - - #[test] - fn test_build_payload_wraps_non_object_meta_for_named_message() { - let payload = build_payload( - &Value::String("hello".into()), - Some(&serde_json::json!(["x"])), - ); - assert_eq!( - payload, - serde_json::json!({ - 
"message": "hello", - "meta": ["x"], - }) - ); - } - - #[test] - fn test_global_log_level() { - set_global_log_level(LogLevel::Debug); - assert_eq!(get_global_log_level(), Some(LogLevel::Debug)); - GLOBAL_LOG_LEVEL.store(255, Ordering::Relaxed); - } - - #[test] - fn test_validate_diagnostic_input_rejects_empty_root_cause() { - let diagnostic = LoggerDiagnosticInput { - code: "TEST".to_string(), - title: "Broken diagnostic".to_string(), - root_cause: Vec::new(), - exact_fix: None, - possible_fixes: None, - details: None, - }; - - assert!(validate_diagnostic_input(&diagnostic).is_err()); - } - - #[test] - fn test_build_copy_text_includes_expected_sections() { - let record = diagnostic_record_from_input( - "logger-test", - LogLevel::Error, - LoggerDiagnosticInput { - code: "TEST_ERROR".to_string(), - title: "Example diagnostic".to_string(), - root_cause: vec!["The config file is missing.".to_string()], - exact_fix: Some(vec![ - "Create the config file before running again.".to_string(), - ]), - possible_fixes: Some(vec![vec![ - "Restore the file from version control.".to_string(), - "Re-run the setup command if the file is generated.".to_string(), - ]]), - details: Some(Map::from_iter([( - "path".to_string(), - Value::String("/tmp/example.json".to_string()), - )])), - }, - ); - - assert_eq!(record.copy_text[0], "Example diagnostic"); - assert!(record.copy_text.contains(&"**What happened**".to_string())); - assert!(record.copy_text.contains(&"**Do this**".to_string())); - assert!( - record - .copy_text - .contains(&"**Try this if needed**".to_string()) - ); - assert!(record.copy_text.contains(&"**Context**".to_string())); - } - - #[test] - fn test_render_message_output_formats_markdown() { - let payload = Value::Object(Map::from_iter([( - "message".to_string(), - Value::String("hello".to_string()), - )])); - - let rendered = render_message_output(LogLevel::Info, "logger-test", &payload); - assert_eq!(rendered, "### hello"); - } - - #[test] - fn 
test_render_message_output_moves_multiline_message_to_block_body() { - let payload = Value::String("line one\nline two".to_string()); - let rendered = render_message_output(LogLevel::Info, "logger-test", &payload); - - assert_eq!(rendered, "### line one\n\nline two"); - } - - #[test] - fn test_render_message_output_renders_nested_payloads() { - let payload = serde_json::json!({ - "started": { - "command": "install", - } - }); - - let rendered = render_message_output(LogLevel::Info, "PluginPipeline", &payload); - assert!(rendered.contains("### started")); - assert!(rendered.contains("- command: install")); - } - - #[test] - fn test_render_diagnostic_output_uses_markdown_sections() { - let record = diagnostic_record_from_input( - "logger-test", - LogLevel::Warn, - LoggerDiagnosticInput { - code: "TEST_WARN".to_string(), - title: "Pretty output".to_string(), - root_cause: vec![ - "The warning must stay readable.".to_string(), - "Each copyText entry should appear on its own line.".to_string(), - ], - exact_fix: Some(vec!["Use pretty JSON for diagnostics.".to_string()]), - possible_fixes: None, - details: Some(Map::from_iter([( - "path".to_string(), - Value::String("C:\\runtime\\plugin".to_string()), - )])), - }, - ); - - let rendered = render_diagnostic_output(LogLevel::Warn, &record); - assert!(rendered.contains("### Pretty output")); - assert!(rendered.contains("**What happened**")); - assert!(rendered.contains(" - The warning must stay readable.")); - assert!(rendered.contains("**Context**")); - assert!(rendered.contains(" - path: C:\\runtime\\plugin")); - } - - #[test] - fn test_build_copy_text_renders_context_without_json_braces() { - let record = diagnostic_record_from_input( - "logger-test", - LogLevel::Warn, - LoggerDiagnosticInput { - code: "TEST_WARN".to_string(), - title: "Context output".to_string(), - root_cause: vec!["Keep context readable.".to_string()], - exact_fix: None, - possible_fixes: None, - details: Some(Map::from_iter([ - ( - "path".to_string(), 
- Value::String("C:\\runtime\\plugin".to_string()), - ), - ("phase".to_string(), Value::String("cleanup".to_string())), - ])), - }, - ); - - assert!( - record - .copy_text - .contains(&" - path: C:\\runtime\\plugin".to_string()) - ); - assert!(record.copy_text.contains(&" - phase: cleanup".to_string())); - assert!(!record.copy_text.iter().any(|line| line == "{")); - } - - #[test] - fn test_silent_logger_buffers_diagnostics() { - clear_buffered_diagnostics(); - - let logger = create_logger("buffer-test", Some(LogLevel::Silent)); - assert!( - logger - .warn(LoggerDiagnosticInput { - code: "BUFFERED_WARN".to_string(), - title: "Buffered diagnostic".to_string(), - root_cause: vec!["Silent mode should still retain diagnostics.".to_string()], - exact_fix: None, - possible_fixes: None, - details: None, - }) - .is_none() - ); - - let drained = drain_buffered_diagnostics(); - assert_eq!(drained.len(), 1); - assert_eq!(drained[0].code, "BUFFERED_WARN"); - } - - #[derive(Default)] - struct FlushTrackingWriter { - writes: Vec, - flush_count: usize, - } - - impl Write for FlushTrackingWriter { - fn write(&mut self, buf: &[u8]) -> std::io::Result { - self.writes.extend_from_slice(buf); - Ok(buf.len()) - } - - fn flush(&mut self) -> std::io::Result<()> { - self.flush_count += 1; - Ok(()) - } - } - - #[test] - fn test_write_output_line_flushes_each_message() { - let mut writer = FlushTrackingWriter::default(); - - write_output_line(&mut writer, "### hello").unwrap(); - - assert_eq!(String::from_utf8(writer.writes).unwrap(), "### hello\n"); - assert_eq!(writer.flush_count, 1); - } -} diff --git a/sdk/src/infra/logger/core.rs b/sdk/src/infra/logger/core.rs new file mode 100644 index 00000000..47191a45 --- /dev/null +++ b/sdk/src/infra/logger/core.rs @@ -0,0 +1,263 @@ +use std::sync::atomic::{AtomicU8, Ordering}; +use std::time::{Duration, Instant}; + +use serde::Serialize; +use serde_json::Value; + +use super::diagnostic::{DiagnosticInput, invalid_record, record_from_input, 
validate_diagnostic_input}; +use super::sink::buffer_diagnostic; + +// --------------------------------------------------------------------------- +// Log levels +// --------------------------------------------------------------------------- + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize)] +#[serde(rename_all = "lowercase")] +pub enum LogLevel { + Silent, + Fatal, + Error, + Warn, + Info, + Debug, + Trace, +} + +impl LogLevel { + pub fn priority(self) -> u8 { + match self { + Self::Silent => 0, + Self::Fatal => 1, + Self::Error => 2, + Self::Warn => 3, + Self::Info => 4, + Self::Debug => 5, + Self::Trace => 6, + } + } + + pub fn as_str(self) -> &'static str { + match self { + Self::Silent => "silent", + Self::Fatal => "fatal", + Self::Error => "error", + Self::Warn => "warn", + Self::Info => "info", + Self::Debug => "debug", + Self::Trace => "trace", + } + } + + pub fn from_str_loose(s: &str) -> Option { + match s.to_ascii_lowercase().as_str() { + "silent" => Some(Self::Silent), + "fatal" => Some(Self::Fatal), + "error" => Some(Self::Error), + "warn" => Some(Self::Warn), + "info" => Some(Self::Info), + "debug" => Some(Self::Debug), + "trace" => Some(Self::Trace), + _ => None, + } + } +} + +// --------------------------------------------------------------------------- +// Span +// --------------------------------------------------------------------------- + +/// An operation span that tracks timing and nesting. +#[derive(Debug, Clone)] +pub struct Span { + pub name: String, + pub namespace: String, + pub start: Instant, +} + +impl Span { + pub fn new(name: &str, namespace: &str) -> Self { + Self { + name: name.to_string(), + namespace: namespace.to_string(), + start: Instant::now(), + } + } + + pub fn enter(&self) -> SpanGuard { + SpanGuard::new(self.clone()) + } + + pub fn duration(&self) -> Duration { + self.start.elapsed() + } +} + +/// RAII guard that emits span exit event on drop. 
+pub struct SpanGuard { + span: Span, + exited: bool, +} + +impl SpanGuard { + fn new(span: Span) -> Self { + // Emit span enter event immediately + crate::infra::logger::sink::write_span_enter(&span); + Self { span, exited: false } + } + + pub fn exit(mut self) { + self.do_exit(); + } + + fn do_exit(&mut self) { + if self.exited { + return; + } + self.exited = true; + crate::infra::logger::sink::write_span_exit(&self.span); + } +} + +impl Drop for SpanGuard { + fn drop(&mut self) { + self.do_exit(); + } +} + +// --------------------------------------------------------------------------- +// Event +// --------------------------------------------------------------------------- + +#[derive(Debug, Clone)] +pub struct Event { + pub level: LogLevel, + pub namespace: String, + pub message: Value, + pub meta: Option, + pub span_name: Option, +} + +// --------------------------------------------------------------------------- +// Logger +// --------------------------------------------------------------------------- + +/// A namespaced logger with configurable level. 
+pub struct Logger { + pub namespace: String, + pub level: LogLevel, +} + +impl Logger { + pub fn new(namespace: &str, level: LogLevel) -> Self { + Self { + namespace: namespace.to_string(), + level, + } + } + + pub fn info(&self, message: impl Into, meta: Option) { + self.log_message(LogLevel::Info, message.into(), meta); + } + + pub fn debug(&self, message: impl Into, meta: Option) { + self.log_message(LogLevel::Debug, message.into(), meta); + } + + pub fn trace(&self, message: impl Into, meta: Option) { + self.log_message(LogLevel::Trace, message.into(), meta); + } + + pub fn warn(&self, diagnostic: DiagnosticInput) { + self.log_diagnostic(LogLevel::Warn, diagnostic); + } + + pub fn error(&self, diagnostic: DiagnosticInput) { + self.log_diagnostic(LogLevel::Error, diagnostic); + } + + pub fn fatal(&self, diagnostic: DiagnosticInput) { + self.log_diagnostic(LogLevel::Fatal, diagnostic); + } + + pub fn span(&self, name: &str) -> Span { + Span::new(name, &self.namespace) + } + + fn should_emit(&self, level: LogLevel) -> bool { + level.priority() <= self.level.priority() + } + + fn log_message(&self, level: LogLevel, message: Value, meta: Option) { + if !self.should_emit(level) { + return; + } + let event = Event { + level, + namespace: self.namespace.clone(), + message, + meta, + span_name: None, + }; + crate::infra::logger::sink::write_event(&event); + } + + fn log_diagnostic(&self, level: LogLevel, diagnostic: DiagnosticInput) { + let record = match validate_diagnostic_input(&diagnostic) { + Ok(()) => record_from_input(&self.namespace, level.as_str(), diagnostic), + Err(errors) => { + invalid_record(&self.namespace, level.as_str(), serde_json::to_value(&diagnostic).unwrap_or_default(), &errors) + } + }; + + // Buffer diagnostics even if level is Silent + buffer_diagnostic(&record); + + if !self.should_emit(level) { + return; + } + + let event = Event { + level, + namespace: self.namespace.clone(), + message: serde_json::to_value(&record).unwrap_or_default(), + 
meta: None, + span_name: None, + }; + crate::infra::logger::sink::write_event(&event); + } +} + +// --------------------------------------------------------------------------- +// Global state +// --------------------------------------------------------------------------- + +static GLOBAL_LEVEL: AtomicU8 = AtomicU8::new(4); // Info default + +pub fn set_global_level(level: LogLevel) { + GLOBAL_LEVEL.store(level.priority(), Ordering::Relaxed); +} + +pub fn get_global_level() -> LogLevel { + match GLOBAL_LEVEL.load(Ordering::Relaxed) { + 0 => LogLevel::Silent, + 1 => LogLevel::Fatal, + 2 => LogLevel::Error, + 3 => LogLevel::Warn, + 4 => LogLevel::Info, + 5 => LogLevel::Debug, + 6 => LogLevel::Trace, + _ => LogLevel::Info, + } +} + +pub fn resolve_level(explicit: Option) -> LogLevel { + if let Some(l) = explicit { + return l; + } + if let Ok(env_val) = std::env::var("LOG_LEVEL") + && let Some(l) = LogLevel::from_str_loose(&env_val) + { + return l; + } + get_global_level() +} diff --git a/sdk/src/infra/logger/diagnostic.rs b/sdk/src/infra/logger/diagnostic.rs new file mode 100644 index 00000000..22223c71 --- /dev/null +++ b/sdk/src/infra/logger/diagnostic.rs @@ -0,0 +1,217 @@ +//! Structured diagnostic types for error/warning/fatal logging. + +use serde::{Deserialize, Serialize}; +use serde_json::{Map, Value}; + +/// Input schema for a structured diagnostic log entry. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct DiagnosticInput { + pub code: String, + pub title: String, + pub root_cause: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + pub exact_fix: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub possible_fixes: Option>>, + #[serde(skip_serializing_if = "Option::is_none")] + pub details: Option>, +} + +/// Full diagnostic record including runtime metadata. 
+#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct DiagnosticRecord { + pub code: String, + pub title: String, + pub root_cause: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + pub exact_fix: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub possible_fixes: Option>>, + #[serde(skip_serializing_if = "Option::is_none")] + pub details: Option>, + pub level: String, + pub namespace: String, + pub copy_text: Vec, +} + +/// Validate a diagnostic input for required fields. +pub fn validate_diagnostic_input(input: &DiagnosticInput) -> Result<(), Vec> { + let mut errors: Vec = Vec::new(); + + if input.code.trim().is_empty() { + errors.push("code must be a non-empty string".to_string()); + } + if input.title.trim().is_empty() { + errors.push("title must be a non-empty string".to_string()); + } + if input.root_cause.is_empty() { + errors.push("rootCause must contain at least one line".to_string()); + } + + if let Some(lines) = &input.exact_fix && lines.is_empty() { + errors.push("exactFix must contain at least one line when provided".to_string()); + } + + if let Some(fixes) = &input.possible_fixes { + if fixes.is_empty() { + errors.push("possibleFixes must contain at least one fix when provided".to_string()); + } + for (index, lines) in fixes.iter().enumerate() { + if lines.is_empty() { + errors.push(format!("possibleFixes[{index}] must contain at least one line")); + } + } + } + + if errors.is_empty() { + Ok(()) + } else { + Err(errors) + } +} + +/// Build copy-friendly text from a diagnostic record. 
+pub fn build_copy_text(record: &DiagnosticRecord) -> Vec { + let mut lines = vec![record.title.clone()]; + + append_section(&mut lines, "**What happened**", &record.root_cause, None); + + if let Some(exact_fix) = &record.exact_fix { + append_section(&mut lines, "**Do this**", exact_fix, None); + } + + if let Some(possible_fixes) = &record.possible_fixes + && !possible_fixes.is_empty() + { + if !lines.is_empty() { + lines.push(String::new()); + } + lines.push("**Try this if needed**".to_string()); + for (index, fix) in possible_fixes.iter().enumerate() { + let mut iter = fix.iter(); + if let Some(first) = iter.next() { + lines.push(format!(" {}. {}", index + 1, first)); + } + for entry in iter { + lines.push(format!(" {entry}")); + } + } + } + + if let Some(details) = &record.details + && !details.is_empty() + { + if !lines.is_empty() { + lines.push(String::new()); + } + lines.push("**Context**".to_string()); + let mut detail_lines = value_to_markdown_lines(&Value::Object(details.clone())); + for line in &mut detail_lines { + line.insert_str(0, " "); + } + lines.extend(detail_lines); + } + + lines +} + +fn append_section( + lines: &mut Vec, + title: &str, + entries: &[String], + numbered: Option, +) { + if entries.is_empty() { + return; + } + + if !lines.is_empty() { + lines.push(String::new()); + } + + if !title.is_empty() { + lines.push(title.to_string()); + } + + match numbered { + Some(number) => { + let mut iter = entries.iter(); + if let Some(first) = iter.next() { + lines.push(format!(" {number}. {first}")); + } + for entry in iter { + lines.push(format!(" {entry}")); + } + } + None => { + for entry in entries { + lines.push(format!(" - {entry}")); + } + } + } +} + +use super::formatter::value_to_markdown_lines; + +/// Build a diagnostic record from validated input. 
+pub fn record_from_input( + namespace: &str, + level: &str, + input: DiagnosticInput, +) -> DiagnosticRecord { + let mut record = DiagnosticRecord { + code: input.code.trim().to_string(), + title: input.title.trim().to_string(), + root_cause: input.root_cause, + exact_fix: input.exact_fix, + possible_fixes: input.possible_fixes, + details: input.details, + level: level.to_string(), + namespace: namespace.to_string(), + copy_text: Vec::new(), + }; + record.copy_text = build_copy_text(&record); + record +} + +/// Build a fallback diagnostic record for invalid input. +pub fn invalid_record( + namespace: &str, + level: &str, + raw_payload: Value, + validation_errors: &[String], +) -> DiagnosticRecord { + let mut details = Map::new(); + details.insert("rawPayload".to_string(), raw_payload); + details.insert( + "validationErrors".to_string(), + Value::Array( + validation_errors + .iter() + .map(|e| Value::String(e.clone())) + .collect(), + ), + ); + + let mut record = DiagnosticRecord { + code: "LOGGER_DIAGNOSTIC_SCHEMA_INVALID".to_string(), + title: "Logger diagnostic payload is invalid".to_string(), + root_cause: vec![ + "The logger received a warn/error/fatal payload that does not match the required diagnostic schema.".to_string(), + format!("Validation issues: {}", validation_errors.join("; ")), + ], + exact_fix: Some(vec![ + "Pass a diagnostic object with non-empty code, title, and rootCause fields.".to_string(), + "Keep exactFix and each possibleFixes entry as non-empty string arrays when they are present.".to_string(), + ]), + possible_fixes: None, + details: Some(details), + level: level.to_string(), + namespace: namespace.to_string(), + copy_text: Vec::new(), + }; + record.copy_text = build_copy_text(&record); + record +} diff --git a/sdk/src/infra/logger/formatter.rs b/sdk/src/infra/logger/formatter.rs new file mode 100644 index 00000000..768ba641 --- /dev/null +++ b/sdk/src/infra/logger/formatter.rs @@ -0,0 +1,228 @@ +use serde_json::Value; + +use 
super::core::{Event, LogLevel, Span}; + +/// Format an event as Markdown. +pub fn format_event(event: &Event) -> String { + match event.level { + LogLevel::Warn | LogLevel::Error | LogLevel::Fatal => { + format_diagnostic_event(event) + } + _ => { + format_message_event(event) + } + } +} + +/// Format a span enter event. +pub fn format_span_enter(span: &Span) -> String { + format!("### {} started", span.name) +} + +/// Format a span exit event with duration. +pub fn format_span_exit(span: &Span) -> String { + let duration_ms = span.duration().as_millis(); + format!("### {} completed\n - duration: {}ms", span.name, duration_ms) +} + +fn format_message_event(event: &Event) -> String { + let (title, meta_lines) = extract_message_and_meta(&event.message, event.meta.as_ref()); + let mut lines = Vec::new(); + + if let Some(title) = title { + if title.contains('\n') { + let parts: Vec<&str> = title.splitn(2, '\n').collect(); + lines.push(format!("### {}", parts[0].trim())); + lines.push(String::new()); + lines.push(parts[1].trim().to_string()); + } else { + lines.push(format!("### {}", title)); + } + } else { + lines.push("### Details".to_string()); + } + + if !meta_lines.is_empty() { + lines.push(String::new()); + lines.extend(meta_lines); + } + + lines.join("\n") +} + +fn format_diagnostic_event(event: &Event) -> String { + // For diagnostic events, the message contains the serialized DiagnosticRecord + let record: super::diagnostic::DiagnosticRecord = match serde_json::from_value(event.message.clone()) { + Ok(r) => r, + Err(_) => return "### Diagnostic error\n - failed to parse diagnostic record".to_string(), + }; + + let mut lines = vec![format!("### {}", record.title)]; + + if !record.root_cause.is_empty() { + lines.push(String::new()); + lines.push("**What happened**".to_string()); + for cause in &record.root_cause { + lines.push(format!(" - {cause}")); + } + } + + if let Some(exact_fix) = &record.exact_fix + && !exact_fix.is_empty() + { + lines.push(String::new()); 
+ lines.push("**Do this**".to_string()); + for fix in exact_fix { + lines.push(format!(" - {fix}")); + } + } + + if let Some(possible_fixes) = &record.possible_fixes + && !possible_fixes.is_empty() + { + lines.push(String::new()); + lines.push("**Try this if needed**".to_string()); + for (index, fix) in possible_fixes.iter().enumerate() { + let mut iter = fix.iter(); + if let Some(first) = iter.next() { + lines.push(format!(" {}. {}", index + 1, first)); + } + for entry in iter { + lines.push(format!(" {entry}")); + } + } + } + + if let Some(details) = &record.details + && !details.is_empty() + { + lines.push(String::new()); + lines.push("**Context**".to_string()); + let mut detail_lines = value_to_markdown_lines(&Value::Object(details.clone())); + for line in &mut detail_lines { + line.insert_str(0, " "); + } + lines.extend(detail_lines); + } + + lines.join("\n") +} + +fn extract_message_and_meta(message: &Value, meta: Option<&Value>) -> (Option, Vec) { + let (msg, mut lines) = match message { + Value::String(s) => (Some(s.clone()), Vec::new()), + Value::Object(map) => { + if let Some(Value::String(msg)) = map.get("message") { + let mut remainder = map.clone(); + remainder.remove("message"); + let lines = if remainder.is_empty() { + Vec::new() + } else { + value_to_markdown_lines(&Value::Object(remainder)) + }; + (Some(msg.clone()), lines) + } else if map.len() == 1 { + let (key, val) = map.iter().next().unwrap(); + match val { + Value::Null | Value::Bool(_) | Value::Number(_) | Value::String(_) => { + (Some(format!("{key}: {}", scalar_to_text(val))), Vec::new()) + } + Value::Array(items) if !items.is_empty() => { + (Some(key.clone()), value_to_markdown_lines(val)) + } + Value::Object(obj) if !obj.is_empty() => { + (Some(key.clone()), value_to_markdown_lines(val)) + } + _ => (None, value_to_markdown_lines(message)), + } + } else { + (None, value_to_markdown_lines(message)) + } + } + _ => (None, value_to_markdown_lines(message)), + }; + + // Merge external meta if 
provided
+    if let Some(meta_val) = meta
+        && !meta_val.is_null()
+    {
+        let meta_lines = value_to_markdown_lines(meta_val);
+        lines.extend(meta_lines);
+    }
+
+    (msg, lines)
+}
+
+pub(crate) fn value_to_markdown_lines(value: &Value) -> Vec<String> {
+    let mut lines = Vec::new();
+    append_markdown_value(&mut lines, None, value, 0);
+    lines
+}
+
+pub(crate) fn append_markdown_value(lines: &mut Vec<String>, label: Option<&str>, value: &Value, depth: usize) {
+    let prefix = " ".repeat(depth);
+    let bullet = format!("{prefix}- ");
+
+    match value {
+        Value::Null | Value::Bool(_) | Value::Number(_) | Value::String(_) => match label {
+            Some(name) => {
+                lines.push(format!("{bullet}{name}: {}", scalar_to_text(value)));
+            }
+            None => {
+                lines.push(format!("{bullet}{}", scalar_to_text(value)));
+            }
+        },
+        Value::Array(items) => {
+            if items.is_empty() {
+                match label {
+                    Some(name) => lines.push(format!("{bullet}{name}: []")),
+                    None => lines.push(format!("{bullet}[]")),
+                }
+                return;
+            }
+
+            if let Some(name) = label {
+                lines.push(format!("{bullet}{name}:"));
+                for item in items {
+                    append_markdown_value(lines, None, item, depth + 1);
+                }
+                return;
+            }
+
+            for item in items {
+                append_markdown_value(lines, None, item, depth);
+            }
+        }
+        Value::Object(map) => {
+            if map.is_empty() {
+                match label {
+                    Some(name) => lines.push(format!("{bullet}{name}: {{}}")),
+                    None => lines.push(format!("{bullet}{{}}")),
+                }
+                return;
+            }
+
+            if let Some(name) = label {
+                lines.push(format!("{bullet}{name}:"));
+                for (key, nested) in map {
+                    append_markdown_value(lines, Some(key), nested, depth + 1);
+                }
+                return;
+            }
+
+            for (key, nested) in map {
+                append_markdown_value(lines, Some(key), nested, depth);
+            }
+        }
+    }
+}
+
+pub(crate) fn scalar_to_text(value: &Value) -> String {
+    match value {
+        Value::Null => "null".to_string(),
+        Value::Bool(b) => b.to_string(),
+        Value::Number(n) => n.to_string(),
+        Value::String(s) => s.clone(),
+        Value::Array(_) | Value::Object(_) =>
serde_json::to_string(value).unwrap_or_default(), + } +} diff --git a/sdk/src/infra/logger/mod.rs b/sdk/src/infra/logger/mod.rs new file mode 100644 index 00000000..39009be4 --- /dev/null +++ b/sdk/src/infra/logger/mod.rs @@ -0,0 +1,305 @@ +#![deny(clippy::all)] + +//! Structured Markdown logger with span tracking for observability. +//! +//! Output format: Markdown only. No JSON, no ANSI colors. +//! Destination: stdout for info/debug/trace, stderr for warn/error/fatal. + +pub mod core; +pub mod diagnostic; +pub mod formatter; +pub mod sink; + +pub use core::{LogLevel, Logger, Span, SpanGuard, get_global_level, resolve_level, set_global_level}; +pub use diagnostic::{DiagnosticInput, DiagnosticRecord, validate_diagnostic_input}; +pub use sink::{clear_diagnostics, drain_diagnostics, flush}; + +// Legacy re-exports for backward compatibility during migration +pub use diagnostic::DiagnosticInput as LoggerDiagnosticInput; +pub use diagnostic::DiagnosticRecord as LoggerDiagnosticRecord; + +/// Create a new logger with optional explicit level. +/// Falls back to global level or environment variable `LOG_LEVEL`. +pub fn create_logger(namespace: &str, explicit_level: Option) -> Logger { + let level = resolve_level(explicit_level); + Logger::new(namespace, level) +} + +// --------------------------------------------------------------------------- +// Convenience macros +// --------------------------------------------------------------------------- + +#[macro_export] +macro_rules! info { + ($logger:expr, $msg:expr) => { + $logger.info(serde_json::Value::String($msg.to_string()), None) + }; + ($logger:expr, $msg:expr, $meta:expr) => { + $logger.info(serde_json::Value::String($msg.to_string()), Some($meta)) + }; +} + +#[macro_export] +macro_rules! 
debug { + ($logger:expr, $msg:expr) => { + $logger.debug(serde_json::Value::String($msg.to_string()), None) + }; + ($logger:expr, $msg:expr, $meta:expr) => { + $logger.debug(serde_json::Value::String($msg.to_string()), Some($meta)) + }; +} + +#[macro_export] +macro_rules! trace { + ($logger:expr, $msg:expr) => { + $logger.trace(serde_json::Value::String($msg.to_string()), None) + }; + ($logger:expr, $msg:expr, $meta:expr) => { + $logger.trace(serde_json::Value::String($msg.to_string()), Some($meta)) + }; +} + +#[macro_export] +macro_rules! warn { + ($logger:expr, $diag:expr) => { + $logger.warn($diag) + }; +} + +#[macro_export] +macro_rules! error { + ($logger:expr, $diag:expr) => { + $logger.error($diag) + }; +} + +#[macro_export] +macro_rules! fatal { + ($logger:expr, $diag:expr) => { + $logger.fatal($diag) + }; +} + +#[macro_export] +macro_rules! span { + ($logger:expr, $name:expr) => { + $logger.span($name) + }; +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +#[cfg(test)] +mod tests { + use super::*; + use crate::infra::logger::core::Event; + use serde_json::Value; + + #[test] + fn test_log_level_priority_ordering() { + assert!(LogLevel::Silent.priority() < LogLevel::Fatal.priority()); + assert!(LogLevel::Fatal.priority() < LogLevel::Error.priority()); + assert!(LogLevel::Error.priority() < LogLevel::Warn.priority()); + assert!(LogLevel::Warn.priority() < LogLevel::Info.priority()); + assert!(LogLevel::Info.priority() < LogLevel::Debug.priority()); + assert!(LogLevel::Debug.priority() < LogLevel::Trace.priority()); + } + + #[test] + fn test_log_level_from_str_case_insensitive() { + assert_eq!(LogLevel::from_str_loose("info"), Some(LogLevel::Info)); + assert_eq!(LogLevel::from_str_loose("INFO"), Some(LogLevel::Info)); + assert_eq!(LogLevel::from_str_loose("Debug"), Some(LogLevel::Debug)); + 
assert_eq!(LogLevel::from_str_loose("unknown"), None); + } + + #[test] + fn test_create_logger_uses_global_level() { + set_global_level(LogLevel::Debug); + let logger = create_logger("test", None); + assert_eq!(logger.level, LogLevel::Debug); + set_global_level(LogLevel::Info); // reset + } + + #[test] + fn test_create_logger_uses_explicit_level() { + set_global_level(LogLevel::Info); + let logger = create_logger("test", Some(LogLevel::Warn)); + assert_eq!(logger.level, LogLevel::Warn); + } + + #[test] + fn test_logger_filters_by_level() { + let logger = Logger::new("test", LogLevel::Warn); + // These should not panic or emit; just verify they don't crash + logger.info("should be filtered", None); + logger.debug("should be filtered", None); + logger.trace("should be filtered", None); + // Warn, Error, Fatal should be emitted (but we can't easily capture in unit test) + } + + #[test] + fn test_span_creation() { + let span = Span::new("test-span", "test-ns"); + assert_eq!(span.name, "test-span"); + assert_eq!(span.namespace, "test-ns"); + } + + #[test] + fn test_span_tracks_duration() { + let span = Span::new("test", "ns"); + std::thread::sleep(std::time::Duration::from_millis(1)); + let duration = span.duration(); + assert!(duration > std::time::Duration::ZERO); + } + + #[test] + fn test_diagnostic_validation_rejects_empty_fields() { + let diag = DiagnosticInput { + code: "".to_string(), + title: "".to_string(), + root_cause: vec![], + exact_fix: None, + possible_fixes: None, + details: None, + }; + let result = validate_diagnostic_input(&diag); + assert!(result.is_err()); + let errors = result.unwrap_err(); + assert!(errors.iter().any(|e| e.contains("code"))); + assert!(errors.iter().any(|e| e.contains("title"))); + assert!(errors.iter().any(|e| e.contains("rootCause"))); + } + + #[test] + fn test_diagnostic_validation_accepts_valid_input() { + let diag = DiagnosticInput { + code: "TEST".to_string(), + title: "Test diagnostic".to_string(), + root_cause: 
vec!["Something went wrong".to_string()], + exact_fix: Some(vec!["Fix it".to_string()]), + possible_fixes: None, + details: None, + }; + assert!(validate_diagnostic_input(&diag).is_ok()); + } + + #[test] + fn test_diagnostic_buffering() { + clear_diagnostics(); + let record = DiagnosticRecord { + code: "BUF_TEST".to_string(), + title: "Buffered".to_string(), + root_cause: vec!["test".to_string()], + exact_fix: None, + possible_fixes: None, + details: None, + level: "warn".to_string(), + namespace: "test".to_string(), + copy_text: vec![], + }; + sink::buffer_diagnostic(&record); + let drained = drain_diagnostics(); + assert_eq!(drained.len(), 1); + assert_eq!(drained[0].code, "BUF_TEST"); + } + + #[test] + fn test_flush_completes_without_panic() { + // Just verify flush doesn't panic + flush(); + } + + #[test] + fn test_global_level_get_set() { + let original = get_global_level(); + set_global_level(LogLevel::Debug); + assert_eq!(get_global_level(), LogLevel::Debug); + set_global_level(LogLevel::Trace); + assert_eq!(get_global_level(), LogLevel::Trace); + set_global_level(original); // restore + } + + #[test] + fn test_resolve_level_explicit_wins() { + set_global_level(LogLevel::Info); + let level = resolve_level(Some(LogLevel::Error)); + assert_eq!(level, LogLevel::Error); + } + + #[test] + fn test_resolve_level_fallback_to_global() { + set_global_level(LogLevel::Warn); + unsafe { std::env::remove_var("LOG_LEVEL"); } + let level = resolve_level(None); + assert_eq!(level, LogLevel::Warn); + } + + #[test] + fn test_thread_safety() { + use std::sync::Arc; + use std::thread; + + let logger = Arc::new(Logger::new("thread-test", LogLevel::Trace)); + let mut handles = Vec::new(); + + for i in 0..10 { + let log = Arc::clone(&logger); + handles.push(thread::spawn(move || { + log.info(format!("thread-{i}"), None); + log.debug(format!("debug-{i}"), None); + let _span = log.span(format!("span-{i}").as_str()).enter(); + log.warn(DiagnosticInput { + code: format!("WARN-{i}"), + 
title: format!("Warning {i}"), + root_cause: vec!["test".to_string()], + exact_fix: None, + possible_fixes: None, + details: None, + }); + })); + } + + for h in handles { + h.join().expect("thread should not panic"); + } + + // Verify flush completes without deadlock + flush(); + } + + #[test] + fn test_sink_stderr_routing_for_errors() { + // Verify that error/fatal/warn events are routed to stderr + // by checking the internal use_stderr logic via a controlled event. + let warn_event = Event { + level: LogLevel::Warn, + namespace: "test".to_string(), + message: Value::String("warn msg".to_string()), + meta: None, + span_name: None, + }; + let error_event = Event { + level: LogLevel::Error, + namespace: "test".to_string(), + message: Value::String("error msg".to_string()), + meta: None, + span_name: None, + }; + let info_event = Event { + level: LogLevel::Info, + namespace: "test".to_string(), + message: Value::String("info msg".to_string()), + meta: None, + span_name: None, + }; + + // These should not panic; stderr routing is verified by the sink's use_stderr logic. 
+        sink::write_event(&warn_event);
+        sink::write_event(&error_event);
+        sink::write_event(&info_event);
+        flush();
+    }
+}
diff --git a/sdk/src/infra/logger/sink.rs b/sdk/src/infra/logger/sink.rs
new file mode 100644
index 00000000..2aab9a9c
--- /dev/null
+++ b/sdk/src/infra/logger/sink.rs
@@ -0,0 +1,133 @@
+use std::io::{self, Write};
+use std::sync::mpsc::{self, Sender, Receiver};
+use std::sync::{LazyLock, Mutex};
+use std::thread;
+
+use super::core::{Event, LogLevel, Span};
+use super::diagnostic::DiagnosticRecord;
+use super::formatter;
+
+// ---------------------------------------------------------------------------
+// Output command types
+// ---------------------------------------------------------------------------
+
+enum OutputCommand {
+    Write { use_stderr: bool, output: String },
+    Flush { ack: Sender<()> },
+}
+
+// ---------------------------------------------------------------------------
+// Global state
+// ---------------------------------------------------------------------------
+
+static OUTPUT_SINK: LazyLock<Sender<OutputCommand>> = LazyLock::new(spawn_output_sink);
+static DIAGNOSTIC_BUFFER: LazyLock<Mutex<Vec<DiagnosticRecord>>> =
+    LazyLock::new(|| Mutex::new(Vec::new()));
+
+// ---------------------------------------------------------------------------
+// Public API
+// ---------------------------------------------------------------------------
+
+pub fn write_event(event: &Event) {
+    let use_stderr = matches!(event.level, LogLevel::Error | LogLevel::Fatal | LogLevel::Warn);
+    let output = formatter::format_event(event);
+    send_output(use_stderr, output);
+}
+
+pub fn write_span_enter(span: &Span) {
+    let output = formatter::format_span_enter(span);
+    send_output(false, output);
+}
+
+pub fn write_span_exit(span: &Span) {
+    let output = formatter::format_span_exit(span);
+    send_output(false, output);
+}
+
+pub fn buffer_diagnostic(record: &DiagnosticRecord) {
+    if let Ok(mut buf) = DIAGNOSTIC_BUFFER.lock() {
+        buf.push(record.clone());
+    }
+}
+
+pub fn drain_diagnostics() -> Vec<DiagnosticRecord> {
+    match DIAGNOSTIC_BUFFER.lock() {
+        Ok(mut buf) => std::mem::take(&mut *buf),
+        Err(_) => Vec::new(),
+    }
+}
+
+pub fn clear_diagnostics() {
+    if let Ok(mut buf) = DIAGNOSTIC_BUFFER.lock() {
+        buf.clear();
+    }
+}
+
+pub fn flush() {
+    let (ack_tx, ack_rx) = mpsc::channel();
+    if OUTPUT_SINK.send(OutputCommand::Flush { ack: ack_tx }).is_ok() {
+        let _ = ack_rx.recv();
+    }
+}
+
+// ---------------------------------------------------------------------------
+// Internal helpers
+// ---------------------------------------------------------------------------
+
+fn send_output(use_stderr: bool, output: String) {
+    if OUTPUT_SINK
+        .send(OutputCommand::Write { use_stderr, output: output.clone() })
+        .is_err()
+    {
+        // Fallback: write directly if sink thread is dead
+        write_direct(use_stderr, &output);
+    }
+}
+
+fn write_direct(use_stderr: bool, output: &str) {
+    if use_stderr {
+        let mut stderr = io::stderr().lock();
+        let _ = writeln!(stderr, "{output}");
+        let _ = stderr.flush();
+    } else {
+        let mut stdout = io::stdout().lock();
+        let _ = writeln!(stdout, "{output}");
+        let _ = stdout.flush();
+    }
+}
+
+fn spawn_output_sink() -> Sender<OutputCommand> {
+    let (tx, rx) = mpsc::channel();
+    thread::Builder::new()
+        .name("tnmsd-logger".to_string())
+        .spawn(move || output_worker(rx))
+        .expect("failed to spawn logger output worker");
+    tx
+}
+
+fn output_worker(receiver: Receiver<OutputCommand>) {
+    let stdout = io::stdout();
+    let stderr = io::stderr();
+    let mut stdout_writer = io::BufWriter::new(stdout);
+    let mut stderr_writer = io::BufWriter::new(stderr);
+
+    while let Ok(command) = receiver.recv() {
+        match command {
+            OutputCommand::Write { use_stderr, output } => {
+                if use_stderr {
+                    let _ = writeln!(stderr_writer, "{output}");
+                } else {
+                    let _ = writeln!(stdout_writer, "{output}");
+                }
+            }
+            OutputCommand::Flush { ack } => {
+                let _ = stdout_writer.flush();
+                let _ = stderr_writer.flush();
+                let _ = ack.send(());
+            }
+        }
+    }
+
+    let _ = stdout_writer.flush();
+    let _ =
stderr_writer.flush(); +} diff --git a/sdk/src/infra/mod.rs b/sdk/src/infra/mod.rs index 5a1c988d..f96a629c 100644 --- a/sdk/src/infra/mod.rs +++ b/sdk/src/infra/mod.rs @@ -2,6 +2,7 @@ pub mod deno_runtime; pub mod desk_paths; pub mod diagnostic_helpers; pub mod file_ops; +pub mod git_fs; pub mod logger; pub mod md_compiler; pub mod path_types; @@ -12,4 +13,5 @@ pub use file_ops::{ write_file_sync, }; +pub use git_fs::{find_all_git_repos, resolve_git_info_dir}; pub use path_types::{FilePathKind, RelativePath, RootPath}; diff --git a/sdk/src/infra/script_runtime.rs b/sdk/src/infra/script_runtime.rs index ba52aaff..0d74ee15 100644 --- a/sdk/src/infra/script_runtime.rs +++ b/sdk/src/infra/script_runtime.rs @@ -408,7 +408,7 @@ mod tests { assert_eq!(proxied, "____.git/info/exclude"); let validated = validate_public_path_impl(&proxied, "/tmp/ws/aindex/public")?; - assert_eq!(validated, "____.git/info/exclude"); + assert_eq!(validated.replace('\\', "/"), "____.git/info/exclude"); Ok(()) } @@ -418,7 +418,7 @@ mod tests { assert_eq!(proxied, "____vscode/settings.json"); let validated = validate_public_path_impl(&proxied, "/tmp/ws/aindex/public")?; - assert_eq!(validated, "____vscode/settings.json"); + assert_eq!(validated.replace('\\', "/"), "____vscode/settings.json"); Ok(()) } @@ -438,7 +438,7 @@ mod tests { assert_eq!(proxied, "____idea/.gitignore"); let validated = validate_public_path_impl(&proxied, "/tmp/ws/aindex/public")?; - assert_eq!(validated, "____idea/.gitignore"); + assert_eq!(validated.replace('\\', "/"), "____idea/.gitignore"); Ok(()) } @@ -467,7 +467,7 @@ console.log(`generated/${ctx.logicalPath}`) serde_json::json!({ "scope": "skill" }), )?; - assert_eq!(resolved, "generated/daily/note.md"); + assert_eq!(resolved.replace('\\', "/"), "generated/daily/note.md"); Ok(()) } } diff --git a/sdk/src/lib.rs b/sdk/src/lib.rs index 0686b885..72e2ed10 100644 --- a/sdk/src/lib.rs +++ b/sdk/src/lib.rs @@ -18,7 +18,7 @@ pub use infra::md_compiler; pub use 
services::clean_service; pub use services::dry_run_service; pub use services::install_service; -pub use services::prompts::{ +pub use services::prompt_service::{ ListPromptsOptions, ManagedPromptKind, PromptArtifactRecord, PromptArtifactState, PromptCatalogItem, PromptCatalogPaths, PromptCatalogPresence, PromptDetails, PromptServiceOptions, PromptSourceLocale, UpsertPromptSourceInput, WritePromptArtifactsInput, diff --git a/sdk/src/policy/cleanup.rs b/sdk/src/policy/cleanup.rs index 2edb4d83..cc176b92 100644 --- a/sdk/src/policy/cleanup.rs +++ b/sdk/src/policy/cleanup.rs @@ -4,10 +4,15 @@ use std::fs; use std::path::{Component, Path, PathBuf}; use globset::{Glob, GlobBuilder, GlobSet, GlobSetBuilder}; -use serde::{Deserialize, Serialize}; use serde_json::json; use walkdir::WalkDir; +pub use crate::domain::cleanup::{ + CleanupDeclarationsDto, CleanupErrorDto, CleanupErrorKindDto, CleanupExecutionResultDto, + CleanupPlan, CleanupProtectionConflictDto, CleanupSnapshot, CleanupTargetDto, + CleanupTargetKindDto, PluginCleanupSnapshotDto, ProtectedPathViolationDto, ProtectedRuleDto, + ProtectionModeDto, ProtectionRuleMatcherDto, +}; use crate::domain::config; use crate::infra::desk_paths; use crate::infra::logger::create_logger; @@ -39,149 +44,6 @@ const EMPTY_DIRECTORY_SCAN_EXCLUDED_BASENAMES: [&str; 15] = [ "volumes", ]; -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum ProtectionModeDto { - Direct, - Recursive, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum ProtectionRuleMatcherDto { - Path, - Glob, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum CleanupTargetKindDto { - File, - Directory, - Glob, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum CleanupErrorKindDto 
{ - File, - Directory, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CleanupTargetDto { - pub path: String, - pub kind: CleanupTargetKindDto, - #[serde(default)] - pub exclude_basenames: Vec, - pub protection_mode: Option, - pub scope: Option, - pub label: Option, -} - -#[derive(Debug, Clone, Default, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CleanupDeclarationsDto { - #[serde(default)] - pub delete: Vec, - #[serde(default)] - pub protect: Vec, - #[serde(default)] - pub exclude_scan_globs: Vec, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct PluginCleanupSnapshotDto { - pub plugin_name: String, - #[serde(default)] - pub outputs: Vec, - #[serde(default)] - pub cleanup: CleanupDeclarationsDto, -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct ProtectedRuleDto { - pub path: String, - pub protection_mode: ProtectionModeDto, - pub reason: String, - pub source: String, - pub matcher: Option, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CleanupSnapshot { - pub workspace_dir: String, - pub aindex_dir: Option, - #[serde(default)] - pub project_roots: Vec, - #[serde(default)] - pub protected_rules: Vec, - #[serde(default)] - pub plugin_snapshots: Vec, - /// Glob patterns from aindex.config.ts that should be excluded from - /// the empty-directory scanner (git-style ** patterns supported). 
- #[serde(default)] - pub empty_dir_exclude_globs: Vec, -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct ProtectedPathViolationDto { - pub target_path: String, - pub protected_path: String, - pub protection_mode: ProtectionModeDto, - pub reason: String, - pub source: String, -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CleanupProtectionConflictDto { - pub output_path: String, - pub output_plugin: String, - pub protected_path: String, - pub protection_mode: ProtectionModeDto, - pub protected_by: String, - pub reason: String, -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CleanupPlan { - pub files_to_delete: Vec, - pub dirs_to_delete: Vec, - pub empty_dirs_to_delete: Vec, - pub violations: Vec, - pub conflicts: Vec, - pub excluded_scan_globs: Vec, -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CleanupErrorDto { - pub path: String, - pub kind: CleanupErrorKindDto, - pub error: String, -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CleanupExecutionResultDto { - pub deleted_files: usize, - pub deleted_dirs: usize, - pub errors: Vec, - pub violations: Vec, - pub conflicts: Vec, - pub files_to_delete: Vec, - pub dirs_to_delete: Vec, - pub empty_dirs_to_delete: Vec, - pub excluded_scan_globs: Vec, -} - #[derive(Debug, Clone)] struct CompiledProtectedRule { path: String, @@ -539,7 +401,7 @@ impl BatchedGlobPlanner { .count(); let glob_pattern_count = self.normalized_patterns.len() - literal_pattern_count; - crate::log_debug!( + crate::debug!( logger, "cleanup native glob execute started", json!({ @@ -600,7 +462,7 @@ impl BatchedGlobPlanner { literal_match_count += 1; } - crate::log_debug!( + crate::debug!( logger, "cleanup native 
glob literal processing complete", json!({ @@ -686,7 +548,7 @@ impl BatchedGlobPlanner { } } - crate::log_debug!( + crate::debug!( logger, "cleanup native glob group walks complete", json!({ @@ -698,7 +560,7 @@ impl BatchedGlobPlanner { ); // Convert HashMaps to sorted Vecs and deduplicate - crate::log_debug!( + crate::debug!( logger, "cleanup native glob result compaction started", json!({}) @@ -723,7 +585,7 @@ impl BatchedGlobPlanner { .collect(); delete_vec.sort_by_key(|(idx, _)| *idx); - crate::log_debug!( + crate::debug!( logger, "cleanup native glob result compaction complete", json!({ @@ -1486,7 +1348,7 @@ fn default_protection_mode_for_target(target: &CleanupTargetDto) -> ProtectionMo pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { let logger = create_logger("CleanupNative", None); - crate::log_trace!( + crate::trace!( logger, "cleanup native plan started", json!({ @@ -1578,7 +1440,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { } } - crate::log_trace!( + crate::trace!( logger, "cleanup native plan inventory collected", json!({ @@ -1615,7 +1477,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { } // Execute the batched glob expansion - crate::log_trace!( + crate::trace!( logger, "cleanup native glob expansion started", json!({ @@ -1633,7 +1495,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { .iter() .map(|(_, paths)| paths.len()) .sum::(); - crate::log_trace!( + crate::trace!( logger, "cleanup native glob expansion complete", json!({ @@ -1673,7 +1535,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { let guard = create_guard(&snapshot, &protected_rules)?; let conflicts = detect_cleanup_protection_conflicts(&output_path_owners, &guard); if !conflicts.is_empty() { - crate::log_trace!( + crate::trace!( logger, "cleanup native plan blocked", json!({ @@ -1693,7 +1555,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { let file_candidates = delete_files.into_iter().collect::>(); let 
dir_candidates = delete_dirs.into_iter().collect::>(); - crate::log_trace!( + crate::trace!( logger, "cleanup native file partition started", json!({ @@ -1703,7 +1565,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { ); let file_partition = partition_deletion_targets(&file_candidates, &guard, Some(&exact_safe_file_paths)); - crate::log_trace!( + crate::trace!( logger, "cleanup native file partition complete", json!({ @@ -1712,7 +1574,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { "violationCount": file_partition.violations.len(), }) ); - crate::log_trace!( + crate::trace!( logger, "cleanup native directory partition started", json!({ @@ -1721,7 +1583,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { }) ); let dir_partition = partition_deletion_targets(&dir_candidates, &guard, None); - crate::log_trace!( + crate::trace!( logger, "cleanup native directory partition complete", json!({ @@ -1730,14 +1592,14 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { "violationCount": dir_partition.violations.len(), }) ); - crate::log_trace!( + crate::trace!( logger, "cleanup native target compaction started", json!({}) ); let (files_to_delete, dirs_to_delete) = compact_deletion_targets(&file_partition.safe_paths, &dir_partition.safe_paths); - crate::log_trace!( + crate::trace!( logger, "cleanup native target compaction complete", json!({ @@ -1745,7 +1607,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { "compactedDirs": dirs_to_delete.len(), }) ); - crate::log_trace!( + crate::trace!( logger, "cleanup native target partition complete", json!({ @@ -1778,7 +1640,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { .map(|pattern| normalize_relative_glob_pattern(pattern)) .collect::>(), )?; - crate::log_trace!( + crate::trace!( logger, "cleanup native empty directory planning started", json!({ @@ -1806,7 +1668,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { .into_iter() 
.filter(|violation| !target_matches_project_root(&violation.target_path, &project_root_keys)) .collect::>(); - crate::log_trace!( + crate::trace!( logger, "cleanup native empty directory planning complete", json!({ @@ -1820,7 +1682,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { violations.extend(empty_dir_violations); violations.sort_by(|a, b| a.target_path.cmp(&b.target_path)); - crate::log_debug!( + crate::debug!( logger, "cleanup native plan complete", json!({ @@ -1844,10 +1706,10 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { pub fn perform_cleanup(snapshot: CleanupSnapshot) -> Result { let logger = create_logger("CleanupNative", None); - crate::log_trace!(logger, "cleanup native perform started", json!({})); + crate::trace!(logger, "cleanup native perform started", json!({})); let plan = plan_cleanup(snapshot)?; if !plan.conflicts.is_empty() || !plan.violations.is_empty() { - crate::log_trace!( + crate::trace!( logger, "cleanup native perform blocked", json!({ @@ -1868,7 +1730,7 @@ pub fn perform_cleanup(snapshot: CleanupSnapshot) -> Result Result Result Result Result Result Result Option { - let dot_git = project_dir.join(".git"); - if !dot_git.exists() { - return None; - } - - let metadata = fs::symlink_metadata(&dot_git).ok()?; - if metadata.is_dir() { - return Some(dot_git.join("info")); - } - - if metadata.is_file() { - let content = fs::read_to_string(&dot_git).ok()?; - for line in content.lines() { - let line = line.trim(); - if let Some(gitdir) = line.strip_prefix("gitdir:") { - let gitdir = Path::new(gitdir.trim()); - let resolved = if gitdir.is_absolute() { - gitdir.to_path_buf() - } else { - project_dir.join(gitdir) - }; - return Some(resolved.join("info")); - } - } - } - - None -} - -const SKIP_DIRS: &[&str] = &["node_modules", ".turbo", "dist", "build", "out", ".cache"]; - -/// Recursively discovers all `.git` entries (directories or files) under a given root, -/// skipping common non-source directories. 
Returns absolute paths of directories -/// containing a `.git` entry. The `root_dir` itself is excluded from results. -pub fn find_all_git_repos(root_dir: &Path, max_depth: usize) -> Vec { - let mut results = Vec::new(); - - fn walk(dir: &Path, root_dir: &Path, depth: usize, max_depth: usize, results: &mut Vec) { - if depth > max_depth { - return; - } - - let entries = match fs::read_dir(dir) { - Ok(e) => e, - Err(_) => return, - }; - - let mut has_git = false; - let mut subdirs = Vec::new(); - - for entry in entries.flatten() { - let name = entry.file_name(); - let name_str = name.to_string_lossy(); - if name_str == ".git" { - has_git = true; - continue; - } - if let Ok(ft) = entry.file_type() - && ft.is_dir() - && !SKIP_DIRS.contains(&name_str.as_ref()) - { - subdirs.push(entry.path()); - } - } - - if has_git && dir != root_dir { - results.push(dir.to_path_buf()); - } - - for subdir in subdirs { - walk(&subdir, root_dir, depth + 1, max_depth, results); - } - } - - walk(root_dir, root_dir, 0, max_depth, &mut results); - results -} - /// Scans `.git/modules/` directory recursively to find all submodule `info/` dirs. /// Handles nested submodules (modules within modules). Returns absolute paths of /// `info/` directories. 
@@ -169,110 +84,6 @@ mod tests { use std::fs; use tempfile::TempDir; - #[test] - fn test_resolve_git_info_dir_for_regular_repo() { - let tmp = TempDir::new().unwrap(); - let dot_git = tmp.path().join(".git"); - fs::create_dir_all(&dot_git).unwrap(); - - let result = resolve_git_info_dir(tmp.path()); - assert_eq!(result, Some(dot_git.join("info"))); - } - - #[test] - fn test_resolve_git_info_dir_for_gitlink() { - let tmp = TempDir::new().unwrap(); - let dot_git = tmp.path().join(".git"); - fs::write(&dot_git, "gitdir: /absolute/path/to/git\n").unwrap(); - - let result = resolve_git_info_dir(tmp.path()); - assert_eq!(result, Some(PathBuf::from("/absolute/path/to/git/info"))); - } - - #[test] - fn test_resolve_git_info_dir_for_relative_gitlink() { - let tmp = TempDir::new().unwrap(); - let dot_git = tmp.path().join(".git"); - fs::write(&dot_git, "gitdir: ../.git/modules/foo\n").unwrap(); - - let result = resolve_git_info_dir(tmp.path()); - assert_eq!( - result, - Some( - tmp - .path() - .join("..") - .join(".git") - .join("modules") - .join("foo") - .join("info") - .canonicalize() - .unwrap_or_else(|_| tmp - .path() - .join("..") - .join(".git") - .join("modules") - .join("foo") - .join("info")) - ) - ); - } - - #[test] - fn test_resolve_git_info_dir_missing() { - let tmp = TempDir::new().unwrap(); - assert_eq!(resolve_git_info_dir(tmp.path()), None); - } - - #[test] - fn test_find_all_git_repos_finds_nested() { - let tmp = TempDir::new().unwrap(); - let root = tmp.path(); - let child = root.join("packages").join("app"); - fs::create_dir_all(root.join(".git")).unwrap(); - fs::create_dir_all(child.join(".git")).unwrap(); - - let result = find_all_git_repos(root, 5); - assert_eq!(result.len(), 1); - assert_eq!(result[0], child); - } - - #[test] - fn test_find_all_git_repos_excludes_root() { - let tmp = TempDir::new().unwrap(); - let root = tmp.path(); - fs::create_dir_all(root.join(".git")).unwrap(); - - let result = find_all_git_repos(root, 5); - 
assert!(result.is_empty()); - } - - #[test] - fn test_find_all_git_repos_skips_skip_dirs() { - let tmp = TempDir::new().unwrap(); - let root = tmp.path(); - let node_modules = root.join("node_modules").join("some-lib"); - fs::create_dir_all(node_modules.join(".git")).unwrap(); - - let result = find_all_git_repos(root, 5); - assert!(result.is_empty()); - } - - #[test] - fn test_find_all_git_repos_respects_max_depth() { - let tmp = TempDir::new().unwrap(); - let root = tmp.path(); - let deep = root.join("a").join("b").join("c").join("d"); - fs::create_dir_all(deep.join(".git")).unwrap(); - - let result = find_all_git_repos(root, 3); - assert!(result.is_empty()); - - let result = find_all_git_repos(root, 4); - assert_eq!(result.len(), 1); - assert_eq!(result[0], deep); - } - #[test] fn test_find_git_module_info_dirs_finds_submodules() { let tmp = TempDir::new().unwrap(); diff --git a/sdk/src/policy/path_blocking.rs b/sdk/src/policy/path_blocking.rs index cd6a2ae1..8036427b 100644 --- a/sdk/src/policy/path_blocking.rs +++ b/sdk/src/policy/path_blocking.rs @@ -37,7 +37,12 @@ pub fn find_blocking_non_directory_path(expected_dir_path: &str) -> Option Result { + let logger = create_logger("clean", options.log_level.as_deref().and_then(|s| crate::infra::logger::LogLevel::from_str_loose(s))); + let _span = logger.span("command.clean").enter(); + + logger.info("Clean started", None); + let cwd = resolve_cwd(options.cwd.as_deref())?; + + let config_span = logger.span("config.load").enter(); let config_result = load_config(&cwd, options.load_user_config)?; + config_span.exit(); + let workspace_dir = resolve_workspace_dir(&cwd, &config_result.config)?; let workspace_warning = build_workspace_mismatch_warning(&cwd, &workspace_dir, &config_result); let workspace_dir_str = workspace_dir.to_string_lossy().into_owned(); - let global_scope = build_global_scope(&config_result.config); - let enabled_plugins = EnabledPlugins::from_config(config_result.config.plugins.as_ref()); + 
logger.info("Config loaded", Some(json!({ + "workspaceDir": &workspace_dir_str, + }))); + + let global_scope = crate::services::common::build_global_scope(&config_result.config); + let enabled_plugins = EnabledPlugins::from_config(config_result.config.plugins.as_ref(), DefaultPluginKind::Clean); + + let context_span = logger.span("context.collect").enter(); + let context = collect_context(&workspace_dir_str, global_scope.as_ref(), &enabled_plugins, &logger)?; + context_span.exit(); - let context = collect_context(&workspace_dir_str, global_scope.as_ref())?; - let (output_map, cleanup_map) = build_output_map(&context, enabled_plugins)?; + logger.info("Context collected", Some(json!({ + "globalMemory": context.global_memory.is_some(), + }))); + + let discover_span = logger.span("cleanup.discover").enter(); + let (output_map, cleanup_map) = build_output_map(&context, enabled_plugins, &logger)?; let mut snapshot = build_cleanup_snapshot(&workspace_dir_str, &output_map, &cleanup_map)?; + discover_span.exit(); + + logger.info("Cleanup targets discovered", Some(json!({ + "pluginCount": snapshot.plugin_snapshots.len(), + "projectRoots": snapshot.project_roots.len(), + }))); - // 根据 cwd 限制清理作用域:当 cwd 位于 workspace 的某个子项目下时, - // 只清理该项目的文件,不触碰其他项目。 + // 根据 cwd 限制清理作用域 if let Some(scope) = resolve_project_scope(&cwd, &workspace_dir) { + logger.info("Project scope resolved", Some(json!({ + "scope": scope.to_string_lossy().to_string(), + }))); snapshot = filter_snapshot_by_scope(snapshot, &scope, &workspace_dir); } if options.dry_run.unwrap_or(false) { + let plan_span = logger.span("cleanup.plan").enter(); let plan = crate::policy::cleanup::plan_cleanup(snapshot.clone()) .map_err(|e| CliError::ExecutionError(e))?; + plan_span.exit(); + let mut warnings = workspace_warning.into_iter().collect::>(); warnings.extend(plan.violations.iter().map(|v| { json!({ @@ -44,6 +79,15 @@ pub fn clean(options: MemorySyncCommandOptions) -> Result Result>(); @@ -104,6 +151,15 @@ pub fn 
clean(options: MemorySyncCommandOptions) -> Result Result) -> Result { - match cwd { - Some(value) => Ok(config::resolve_workspace_dir(value)), - None => std::env::current_dir().map_err(CliError::IoError), - } -} +// --------------------------------------------------------------------------- +// Scope resolution +// --------------------------------------------------------------------------- -fn load_config( - cwd: &Path, - load_user_config: Option, -) -> Result { - if load_user_config == Some(false) { - return Ok(config::MergedConfigResult { - config: UserConfigFile::default(), - sources: vec![], - found: false, - }); - } - - let result = ConfigLoader::with_defaults() - .try_load(cwd) - .map_err(CliError::ConfigError)?; - - if !result.found { - let config_path = config::get_required_global_config_path() - .unwrap_or_else(|_| config::get_global_config_path()); - return Err(CliError::ConfigError(format!( - "Required config file not found at {}. Please create it before running tnmsc.", - config_path.display() - ))); - } +fn resolve_project_scope(cwd: &Path, workspace_dir: &Path) -> Option { + let cwd_norm = strip_unc_prefix(cwd); + let ws_norm = strip_unc_prefix(workspace_dir); - Ok(result) -} + let relative = cwd_norm.strip_prefix(&ws_norm).ok()?; -fn resolve_workspace_dir(_cwd: &Path, config: &UserConfigFile) -> Result { - match config.workspace_dir.as_deref() { - Some(dir) => Ok(config::resolve_workspace_dir(dir)), - None => Err(CliError::ConfigError( - "workspaceDir is required but was not configured. 
Please set workspaceDir in your .tnmsc.json config file.".to_string(), - )), + if relative.as_os_str().is_empty() { + return None; } + + let first_component = relative.components().next()?; + Some(ws_norm.join(first_component.as_os_str())) } -/// 检查路径是否在某个目录下(包含目录本身)。 fn is_path_under_directory(path: &str, directory: &Path) -> bool { let path_buf = Path::new(path); let path_normalized = if path_buf.is_absolute() { @@ -188,44 +217,6 @@ fn is_path_under_directory(path: &str, directory: &Path) -> bool { path_str == dir_str || path_str.starts_with(&format!("{}/", dir_str)) } -/// 根据 cwd 和 workspace_dir 解析项目作用域。 -/// -/// 如果 cwd 位于 workspace_dir 的某个直接子目录下,返回该子目录作为作用域。 -/// 如果 cwd 等于 workspace_dir 或位于 workspace_dir 之外,返回 None(清理全部)。 -/// 移除 Windows UNC 前缀 \\?\,使路径可以与其他非 canonicalize 路径比较。 -fn strip_unc_prefix(path: &Path) -> PathBuf { - let s = path.to_string_lossy(); - if let Some(stripped) = s.strip_prefix(r"\\?\") { - PathBuf::from(stripped) - } else { - path.to_path_buf() - } -} - -fn resolve_project_scope(cwd: &Path, workspace_dir: &Path) -> Option { - // workspace_dir 可能经过 canonicalize 带有 Windows UNC 前缀,需要统一格式 - let cwd_norm = strip_unc_prefix(cwd); - let ws_norm = strip_unc_prefix(workspace_dir); - - let relative = cwd_norm.strip_prefix(&ws_norm).ok()?; - - // 如果 cwd 等于 workspace_dir,不限制作用域 - if relative.as_os_str().is_empty() { - return None; - } - - // 取 workspace_dir 的直接子目录作为作用域 - let first_component = relative.components().next()?; - Some(ws_norm.join(first_component.as_os_str())) -} - -/// 根据作用域过滤 CleanupSnapshot。 -/// -/// 保留规则: -/// - 位于作用域目录下的路径(项目内文件) -/// - 位于 workspace 之外的路径(全局文件) -/// 过滤掉: -/// - 位于 workspace 内但不在作用域下的路径(其他项目文件) fn filter_snapshot_by_scope( mut snapshot: CleanupSnapshot, scope: &Path, @@ -255,346 +246,14 @@ fn filter_snapshot_by_scope( snapshot } -fn build_global_scope(config: &UserConfigFile) -> Option { - let mut scope = serde_json::Map::new(); - - let mut os = serde_json::Map::new(); - os.insert("platform".to_string(), 
json!(std::env::consts::OS)); - os.insert("arch".to_string(), json!(std::env::consts::ARCH)); - os.insert("name".to_string(), json!(std::env::consts::OS)); - scope.insert("os".to_string(), Value::Object(os)); - - if let Some(profile) = config.profile.as_ref() { - let mut value = serde_json::Map::new(); - if let Some(name) = profile.name.as_ref() { - value.insert("name".to_string(), json!(name)); - } - if let Some(username) = profile.username.as_ref() { - value.insert("username".to_string(), json!(username)); - } - if let Some(gender) = profile.gender.as_ref() { - value.insert("gender".to_string(), json!(gender)); - } - if let Some(birthday) = profile.birthday.as_ref() { - value.insert("birthday".to_string(), json!(birthday)); - } - for (key, extra) in &profile.extra { - value.insert(key.clone(), extra.clone()); - } - if !value.is_empty() { - scope.insert("profile".to_string(), Value::Object(value)); - } - } - - if let Some(code_styles) = config.code_styles.as_ref() { - let mut value = serde_json::Map::new(); - if let Some(indent) = code_styles.indent { - value.insert( - "indent".to_string(), - json!(match indent { - config::CodeStyleIndent::Tab => "tab", - config::CodeStyleIndent::Space => "space", - }), - ); - } - if let Some(tab_size) = code_styles.tab_size { - value.insert("tabSize".to_string(), json!(tab_size)); - } - for (key, extra) in &code_styles.extra { - value.insert(key.clone(), extra.clone()); - } - if !value.is_empty() { - scope.insert("codeStyles".to_string(), Value::Object(value)); - } - } - - let mut tool = serde_json::Map::new(); - tool.insert("name".to_string(), json!("tnmsc")); - tool.insert("version".to_string(), json!(crate::version())); - scope.insert("tool".to_string(), Value::Object(tool)); - - (!scope.is_empty()).then(|| Value::Object(scope)) -} - -#[derive(Debug, Clone, Copy, Default)] -struct EnabledPlugins { - agents_md: bool, - claude_code: bool, - codex: bool, - cursor: bool, - droid: bool, - gemini: bool, - git: bool, - jetbrains: 
bool, - jetbrains_code_style: bool, - kiro: bool, - opencode: bool, - qoder: bool, - readme: bool, - trae: bool, - trae_cn: bool, - vscode: bool, - warp: bool, - windsurf: bool, - zed: bool, -} - -impl EnabledPlugins { - fn from_config(config: Option<&PluginsConfig>) -> Self { - Self { - agents_md: config.and_then(|value| value.agents_md).unwrap_or(true), - claude_code: config.and_then(|value| value.claude_code).unwrap_or(true), - codex: config.and_then(|value| value.codex).unwrap_or(false), - cursor: config.and_then(|value| value.cursor).unwrap_or(false), - droid: config.and_then(|value| value.droid).unwrap_or(false), - gemini: config.and_then(|value| value.gemini).unwrap_or(false), - git: config.and_then(|value| value.git).unwrap_or(true), - jetbrains: config.and_then(|value| value.jetbrains).unwrap_or(false), - jetbrains_code_style: config - .and_then(|value| value.jetbrains_code_style) - .unwrap_or(false), - kiro: config.and_then(|value| value.kiro).unwrap_or(false), - opencode: config.and_then(|value| value.opencode).unwrap_or(true), - qoder: config.and_then(|value| value.qoder).unwrap_or(false), - readme: config.and_then(|value| value.readme).unwrap_or(true), - trae: config.and_then(|value| value.trae).unwrap_or(false), - trae_cn: config.and_then(|value| value.trae_cn).unwrap_or(false), - vscode: config.and_then(|value| value.vscode).unwrap_or(false), - warp: config.and_then(|value| value.warp).unwrap_or(false), - windsurf: config.and_then(|value| value.windsurf).unwrap_or(false), - zed: config.and_then(|value| value.zed).unwrap_or(false), - } - } - - fn is_enabled(self, plugin_name: &str) -> bool { - match plugin_name { - "AgentsOutputAdaptor" => self.agents_md, - "GitExcludeOutputAdaptor" => self.git, - "JetBrainsIDECodeStyleConfigOutputAdaptor" => self.jetbrains_code_style, - "VisualStudioCodeIDEConfigOutputAdaptor" => self.vscode, - "ZedIDEConfigOutputAdaptor" => self.zed, - "ReadmeMdConfigFileOutputAdaptor" => self.readme, - "ClaudeCodeCLIOutputAdaptor" 
=> self.claude_code, - "CodexCLIOutputAdaptor" => self.codex, - "CursorOutputAdaptor" => self.cursor, - "DroidCLIOutputAdaptor" => self.droid, - "GeminiCLIOutputAdaptor" => self.gemini, - "JetBrainsAIAssistantCodexOutputAdaptor" => self.jetbrains, - "KiroCLIOutputAdaptor" => self.kiro, - "OpencodeCLIOutputAdaptor" => self.opencode, - "QoderIDEPluginOutputAdaptor" => self.qoder, - "TraeOutputAdaptor" => self.trae || self.trae_cn, - "WarpIDEOutputAdaptor" => self.warp, - "WindsurfOutputAdaptor" => self.windsurf, - _ => false, - } - } -} - -fn collect_context( - workspace_dir: &str, - _global_scope: Option<&Value>, -) -> Result { - #[derive(Debug, serde::Deserialize)] - #[serde(rename_all = "camelCase")] - struct WorkspaceEnvelope { - workspace: crate::domain::plugin_shared::Workspace, - } - - #[derive(Debug, Default, serde::Deserialize)] - #[serde(rename_all = "camelCase")] - struct GlobalMemoryEnvelope { - #[serde(default)] - global_memory: Option, - } - - #[derive(Debug, Default, serde::Deserialize)] - #[serde(rename_all = "camelCase")] - struct CommandsEnvelope { - #[serde(default)] - commands: Vec, - } - - #[derive(Debug, Default, serde::Deserialize)] - #[serde(rename_all = "camelCase")] - struct SubAgentsEnvelope { - #[serde(default)] - sub_agents: Vec, - } - - #[derive(Debug, Default, serde::Deserialize)] - #[serde(rename_all = "camelCase")] - struct SkillsEnvelope { - #[serde(default)] - skills: Vec, - } - - #[derive(Debug, Default, serde::Deserialize)] - #[serde(rename_all = "camelCase")] - struct RulesEnvelope { - #[serde(default)] - rules: Vec, - } - - #[derive(Debug, Default, serde::Deserialize)] - #[serde(rename_all = "camelCase")] - struct ReadmeEnvelope { - #[serde(default)] - readme_prompts: Vec, - } - - #[derive(Debug, Default, serde::Deserialize)] - #[serde(rename_all = "camelCase")] - struct GitIgnoreEnvelope { - #[serde(default)] - global_git_ignore: Option, - } - - #[derive(Debug, Default, serde::Deserialize)] - #[serde(rename_all = "camelCase")] 
- struct GitExcludeEnvelope { - #[serde(default)] - shadow_git_exclude: Option, - } - - #[derive(Debug, Default, serde::Deserialize)] - #[serde(rename_all = "camelCase")] - struct SharedIgnoreEnvelope { - #[serde(default)] - ai_agent_ignore_config_files: - Option>, - } - - #[derive(Debug, Default, serde::Deserialize)] - #[serde(rename_all = "camelCase")] - struct VSCodeEnvelope { - #[serde(default)] - vscode_config_files: Option>, - } - - #[derive(Debug, Default, serde::Deserialize)] - #[serde(rename_all = "camelCase")] - struct ZedEnvelope { - #[serde(default)] - zed_config_files: Option>, - } - - #[derive(Debug, Default, serde::Deserialize)] - #[serde(rename_all = "camelCase")] - struct JetBrainsEnvelope { - #[serde(default)] - jetbrains_config_files: Option>, - } - - #[derive(Debug, Default, serde::Deserialize)] - #[serde(rename_all = "camelCase")] - struct EditorConfigEnvelope { - #[serde(default)] - editor_config_files: Option>, - } - - fn collect_json( - collector: impl Fn(&str) -> Result, - input: Value, - ) -> Result - where - T: serde::de::DeserializeOwned, - { - let raw = collector(&input.to_string())?; - serde_json::from_str(&raw).map_err(CliError::SerializationError) - } - - let aindex = collect_json::( - crate::repositories::aindex_resolvers::collect_aindex_resolvers, - json!({ "workspaceDir": workspace_dir }), - )?; - - let project_prompts = collect_json::( - crate::repositories::project_prompt::collect_project_prompt, - json!({ - "workspaceDir": workspace_dir, - "workspace": aindex.workspace, - "globalScope": None::, - }), - )?; - - let global_memory = collect_json::( - crate::repositories::global_memory::collect_global_memory, - json!({ "workspaceDir": workspace_dir, "globalScope": None:: }), - )?; - - let commands = collect_json::( - crate::repositories::command::collect_command, - json!({ "workspaceDir": workspace_dir, "globalScope": None:: }), - )?; - let sub_agents = collect_json::( - crate::repositories::subagent::collect_subagent, - json!({ 
"workspaceDir": workspace_dir, "globalScope": None:: }), - )?; - let skills = collect_json::( - crate::repositories::skill::collect_skill, - json!({ "workspaceDir": workspace_dir, "globalScope": None:: }), - )?; - let rules = collect_json::( - crate::repositories::rule::collect_rule, - json!({ "workspaceDir": workspace_dir, "globalScope": None:: }), - )?; - let readme = collect_json::( - crate::repositories::readme::collect_readme, - json!({ "workspaceDir": workspace_dir, "globalScope": None:: }), - )?; - let gitignore = collect_json::( - crate::repositories::gitignore::collect_gitignore, - json!({ "workspaceDir": workspace_dir }), - )?; - let git_exclude = collect_json::( - crate::repositories::git_exclude::collect_git_exclude, - json!({ "workspaceDir": workspace_dir }), - )?; - let shared_ignore = collect_json::( - crate::repositories::shared_ignore::collect_shared_ignore, - json!({ "workspaceDir": workspace_dir }), - )?; - let vscode = collect_json::( - crate::repositories::vscode_config::collect_vscode_config, - json!({ "workspaceDir": workspace_dir }), - )?; - let zed = collect_json::( - crate::repositories::zed_config::collect_zed_config, - json!({ "workspaceDir": workspace_dir }), - )?; - let jetbrains = collect_json::( - crate::repositories::jetbrains_config::collect_jetbrains_config, - json!({ "workspaceDir": workspace_dir }), - )?; - let editor_config = collect_json::( - crate::repositories::editorconfig::collect_editorconfig, - json!({ "workspaceDir": workspace_dir }), - )?; - - Ok(OutputContext { - workspace: Some(project_prompts.workspace), - vscode_config_files: vscode.vscode_config_files, - zed_config_files: zed.zed_config_files, - jetbrains_config_files: jetbrains.jetbrains_config_files, - editor_config_files: editor_config.editor_config_files, - fast_commands: (!commands.commands.is_empty()).then_some(commands.commands), - sub_agents: (!sub_agents.sub_agents.is_empty()).then_some(sub_agents.sub_agents), - skills: 
(!skills.skills.is_empty()).then_some(skills.skills), - rules: (!rules.rules.is_empty()).then_some(rules.rules), - global_memory: global_memory.global_memory, - global_git_ignore: gitignore.global_git_ignore, - shadow_git_exclude: git_exclude.shadow_git_exclude, - shadow_source_project_dir: None, - readme_prompts: (!readme.readme_prompts.is_empty()).then_some(readme.readme_prompts), - ai_agent_ignore_config_files: shared_ignore.ai_agent_ignore_config_files, - registered_output_plugins: None, - }) -} +// --------------------------------------------------------------------------- +// Output map building +// --------------------------------------------------------------------------- fn build_output_map( - context: &OutputContext, + context: &crate::context::OutputContext, enabled_plugins: EnabledPlugins, + logger: &Logger, ) -> Result< ( HashMap>, @@ -605,11 +264,9 @@ fn build_output_map( let mut output_map: HashMap> = HashMap::new(); let mut cleanup_map: HashMap = HashMap::new(); + let base_span = logger.span("output.build").enter(); let base_plans = crate::domain::base_output_plans::build_base_output_plans(context)?; for plan in &base_plans.plugins { - // Cleanup targets are always collected regardless of plugin enablement. - // This ensures `tnmsc clean` removes stale files even when a plugin - // has been disabled after previously being enabled. 
cleanup_map .entry(plan.plugin_name.clone()) .or_insert_with(CleanupDeclarationsDto::default) @@ -625,203 +282,88 @@ fn build_output_map( } } - if let Ok(plan) = - crate::domain::output_plans::claude_code_output_plan::build_claude_code_output_plan(context) - { - cleanup_map - .entry("ClaudeCodeCLIOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) - .delete - .extend(plan.cleanup.delete.clone()); + // Build plugin-specific output maps + if let Ok(plan) = crate::domain::output_plans::claude_code_output_plan::build_claude_code_output_plan(context) { + cleanup_map.entry("ClaudeCodeCLIOutputAdaptor".to_string()).or_insert_with(CleanupDeclarationsDto::default).delete.extend(plan.cleanup.delete.clone()); if enabled_plugins.claude_code { - for file in &plan.output_files { - output_map - .entry("ClaudeCodeCLIOutputAdaptor".to_string()) - .or_default() - .push(file.path.clone()); - } + for file in &plan.output_files { output_map.entry("ClaudeCodeCLIOutputAdaptor".to_string()).or_default().push(file.path.clone()); } } } - if let Ok(plan) = crate::domain::output_plans::codex_output_plan::build_codex_output_plan(context) - { - cleanup_map - .entry("CodexCLIOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) - .delete - .extend(plan.cleanup.delete.clone()); + if let Ok(plan) = crate::domain::output_plans::codex_output_plan::build_codex_output_plan(context) { + cleanup_map.entry("CodexCLIOutputAdaptor".to_string()).or_insert_with(CleanupDeclarationsDto::default).delete.extend(plan.cleanup.delete.clone()); if enabled_plugins.codex { - for file in &plan.output_files { - output_map - .entry("CodexCLIOutputAdaptor".to_string()) - .or_default() - .push(file.path.clone()); - } + for file in &plan.output_files { output_map.entry("CodexCLIOutputAdaptor".to_string()).or_default().push(file.path.clone()); } } } - if let Ok(plan) = - crate::domain::output_plans::cursor_output_plan::build_cursor_output_plan(context) - { - cleanup_map - 
.entry("CursorOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) - .delete - .extend(plan.cleanup.delete.clone()); + if let Ok(plan) = crate::domain::output_plans::cursor_output_plan::build_cursor_output_plan(context) { + cleanup_map.entry("CursorOutputAdaptor".to_string()).or_insert_with(CleanupDeclarationsDto::default).delete.extend(plan.cleanup.delete.clone()); if enabled_plugins.cursor { - for file in &plan.output_files { - output_map - .entry("CursorOutputAdaptor".to_string()) - .or_default() - .push(file.path.clone()); - } + for file in &plan.output_files { output_map.entry("CursorOutputAdaptor".to_string()).or_default().push(file.path.clone()); } } } - if let Ok(plan) = crate::domain::output_plans::droid_output_plan::build_droid_output_plan(context) - { - cleanup_map - .entry("DroidCLIOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) - .delete - .extend(plan.cleanup.delete.clone()); + if let Ok(plan) = crate::domain::output_plans::droid_output_plan::build_droid_output_plan(context) { + cleanup_map.entry("DroidCLIOutputAdaptor".to_string()).or_insert_with(CleanupDeclarationsDto::default).delete.extend(plan.cleanup.delete.clone()); if enabled_plugins.droid { - for file in &plan.output_files { - output_map - .entry("DroidCLIOutputAdaptor".to_string()) - .or_default() - .push(file.path.clone()); - } + for file in &plan.output_files { output_map.entry("DroidCLIOutputAdaptor".to_string()).or_default().push(file.path.clone()); } } } - if let Ok(plan) = - crate::domain::output_plans::gemini_output_plan::build_gemini_output_plan(context) - { - cleanup_map - .entry("GeminiCLIOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) - .delete - .extend(plan.cleanup.delete.clone()); + if let Ok(plan) = crate::domain::output_plans::gemini_output_plan::build_gemini_output_plan(context) { + 
cleanup_map.entry("GeminiCLIOutputAdaptor".to_string()).or_insert_with(CleanupDeclarationsDto::default).delete.extend(plan.cleanup.delete.clone()); if enabled_plugins.gemini { - for file in &plan.output_files { - output_map - .entry("GeminiCLIOutputAdaptor".to_string()) - .or_default() - .push(file.path.clone()); - } + for file in &plan.output_files { output_map.entry("GeminiCLIOutputAdaptor".to_string()).or_default().push(file.path.clone()); } } } if let Ok(plan) = crate::domain::output_plans::jetbrains_ai_assistant_codex_output_plan::build_jetbrains_ai_assistant_codex_output_plan(context) { - cleanup_map - .entry("JetBrainsAIAssistantCodexOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) - .delete - .extend(plan.cleanup.delete.clone()); + cleanup_map.entry("JetBrainsAIAssistantCodexOutputAdaptor".to_string()).or_insert_with(CleanupDeclarationsDto::default).delete.extend(plan.cleanup.delete.clone()); if enabled_plugins.jetbrains { - for file in &plan.output_files { - output_map - .entry("JetBrainsAIAssistantCodexOutputAdaptor".to_string()) - .or_default() - .push(file.path.clone()); - } + for file in &plan.output_files { output_map.entry("JetBrainsAIAssistantCodexOutputAdaptor".to_string()).or_default().push(file.path.clone()); } } } if let Ok(plan) = crate::domain::output_plans::kiro_output_plan::build_kiro_output_plan(context) { - cleanup_map - .entry("KiroCLIOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) - .delete - .extend(plan.cleanup.delete.clone()); + cleanup_map.entry("KiroCLIOutputAdaptor".to_string()).or_insert_with(CleanupDeclarationsDto::default).delete.extend(plan.cleanup.delete.clone()); if enabled_plugins.kiro { - for file in &plan.output_files { - output_map - .entry("KiroCLIOutputAdaptor".to_string()) - .or_default() - .push(file.path.clone()); - } + for file in &plan.output_files { output_map.entry("KiroCLIOutputAdaptor".to_string()).or_default().push(file.path.clone()); } } } - if let 
Ok(plan) = - crate::domain::output_plans::opencode_output_plan::build_opencode_output_plan(context) - { - cleanup_map - .entry("OpencodeCLIOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) - .delete - .extend(plan.cleanup.delete.clone()); + if let Ok(plan) = crate::domain::output_plans::opencode_output_plan::build_opencode_output_plan(context) { + cleanup_map.entry("OpencodeCLIOutputAdaptor".to_string()).or_insert_with(CleanupDeclarationsDto::default).delete.extend(plan.cleanup.delete.clone()); if enabled_plugins.opencode { - for file in &plan.output_files { - output_map - .entry("OpencodeCLIOutputAdaptor".to_string()) - .or_default() - .push(file.path.clone()); - } + for file in &plan.output_files { output_map.entry("OpencodeCLIOutputAdaptor".to_string()).or_default().push(file.path.clone()); } } } - if let Ok(plan) = crate::domain::output_plans::qoder_output_plan::build_qoder_output_plan(context) - { - cleanup_map - .entry("QoderIDEPluginOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) - .delete - .extend(plan.cleanup.delete.clone()); + if let Ok(plan) = crate::domain::output_plans::qoder_output_plan::build_qoder_output_plan(context) { + cleanup_map.entry("QoderIDEPluginOutputAdaptor".to_string()).or_insert_with(CleanupDeclarationsDto::default).delete.extend(plan.cleanup.delete.clone()); if enabled_plugins.qoder { - for file in &plan.output_files { - output_map - .entry("QoderIDEPluginOutputAdaptor".to_string()) - .or_default() - .push(file.path.clone()); - } + for file in &plan.output_files { output_map.entry("QoderIDEPluginOutputAdaptor".to_string()).or_default().push(file.path.clone()); } } } if let Ok(plan) = crate::domain::output_plans::trae_output_plan::build_trae_output_plan(context) { - cleanup_map - .entry("TraeOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) - .delete - .extend(plan.cleanup.delete.clone()); + 
cleanup_map.entry("TraeOutputAdaptor".to_string()).or_insert_with(CleanupDeclarationsDto::default).delete.extend(plan.cleanup.delete.clone()); if enabled_plugins.trae || enabled_plugins.trae_cn { - for file in &plan.output_files { - output_map - .entry("TraeOutputAdaptor".to_string()) - .or_default() - .push(file.path.clone()); - } + for file in &plan.output_files { output_map.entry("TraeOutputAdaptor".to_string()).or_default().push(file.path.clone()); } } } if let Ok(plan) = crate::domain::output_plans::warp_output_plan::build_warp_output_plan(context) { - cleanup_map - .entry("WarpIDEOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) - .delete - .extend(plan.cleanup.delete.clone()); + cleanup_map.entry("WarpIDEOutputAdaptor".to_string()).or_insert_with(CleanupDeclarationsDto::default).delete.extend(plan.cleanup.delete.clone()); if enabled_plugins.warp { - for file in &plan.output_files { - output_map - .entry("WarpIDEOutputAdaptor".to_string()) - .or_default() - .push(file.path.clone()); - } + for file in &plan.output_files { output_map.entry("WarpIDEOutputAdaptor".to_string()).or_default().push(file.path.clone()); } } } - if let Ok(plan) = - crate::domain::output_plans::windsurf_output_plan::build_windsurf_output_plan(context) - { - cleanup_map - .entry("WindsurfOutputAdaptor".to_string()) - .or_insert_with(CleanupDeclarationsDto::default) - .delete - .extend(plan.cleanup.delete.clone()); + if let Ok(plan) = crate::domain::output_plans::windsurf_output_plan::build_windsurf_output_plan(context) { + cleanup_map.entry("WindsurfOutputAdaptor".to_string()).or_insert_with(CleanupDeclarationsDto::default).delete.extend(plan.cleanup.delete.clone()); if enabled_plugins.windsurf { - for file in &plan.output_files { - output_map - .entry("WindsurfOutputAdaptor".to_string()) - .or_default() - .push(file.path.clone()); - } + for file in &plan.output_files { output_map.entry("WindsurfOutputAdaptor".to_string()).or_default().push(file.path.clone()); 
} } } + base_span.exit(); Ok((output_map, cleanup_map)) } +// --------------------------------------------------------------------------- +// Cleanup snapshot +// --------------------------------------------------------------------------- + fn build_cleanup_snapshot( workspace_dir: &str, output_map: &HashMap>, @@ -829,8 +371,6 @@ fn build_cleanup_snapshot( ) -> Result { let mut plugin_snapshots = Vec::new(); - // Include all plugins that have either outputs or cleanup targets. - // This ensures disabled plugins still contribute their cleanup declarations. let mut all_plugin_names: Vec<&String> = output_map.keys().collect(); for name in cleanup_map.keys() { if !all_plugin_names.contains(&name) { @@ -859,7 +399,7 @@ fn build_cleanup_snapshot( let mut delete_targets = Vec::new(); for root_path in &project_roots { - let root = std::path::Path::new(root_path); + let root = Path::new(root_path); let agents_path = root.join("AGENTS.md"); let claude_path = root.join("CLAUDE.md"); let agt_path = root.join("agt.mdx"); @@ -915,7 +455,7 @@ fn build_cleanup_snapshot( } fn discover_project_roots(workspace_dir: &str) -> Vec { - let ws_path = std::path::Path::new(workspace_dir); + let ws_path = Path::new(workspace_dir); let mut roots = Vec::new(); if let Ok(entries) = std::fs::read_dir(ws_path) { @@ -937,12 +477,16 @@ fn discover_project_roots(workspace_dir: &str) -> Vec { roots } +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + #[cfg(test)] mod tests { use super::*; use tempfile::TempDir; - fn with_home_dir(home_dir: &Path, callback: impl FnOnce() -> T) -> T { + fn with_home_dir(home_dir: &std::path::Path, callback: impl FnOnce() -> T) -> T { let _guard = match crate::domain::TEST_ENV_LOCK.lock() { Ok(g) => g, Err(error) => error.into_inner(), @@ -967,7 +511,7 @@ mod tests { result } - fn create_test_config(home_dir: &Path, workspace_dir: &Path) -> 
std::io::Result<()> { + fn create_test_config(home_dir: &std::path::Path, workspace_dir: &std::path::Path) -> std::io::Result<()> { let config_content = json!({ "workspaceDir": workspace_dir.to_string_lossy() }); @@ -1061,7 +605,7 @@ mod tests { #[test] fn clean_enabled_plugins_from_empty_config() { - let plugins = EnabledPlugins::from_config(None); + let plugins = EnabledPlugins::from_config(None, DefaultPluginKind::Clean); assert!(plugins.agents_md); assert!(plugins.claude_code); assert!(plugins.git); @@ -1070,13 +614,13 @@ mod tests { #[test] fn clean_enabled_plugins_respects_config() { - let config = PluginsConfig { + let config = crate::domain::config::PluginsConfig { git: Some(false), readme: Some(false), claude_code: Some(true), ..Default::default() }; - let plugins = EnabledPlugins::from_config(Some(&config)); + let plugins = EnabledPlugins::from_config(Some(&config), DefaultPluginKind::Clean); assert!(!plugins.git); assert!(!plugins.readme); assert!(plugins.claude_code); @@ -1084,7 +628,7 @@ mod tests { #[test] fn clean_plugin_name_matching() { - let plugins = EnabledPlugins::from_config(None); + let plugins = EnabledPlugins::from_config(None, DefaultPluginKind::Clean); assert!(plugins.is_enabled("GitExcludeOutputAdaptor")); assert!(plugins.is_enabled("ReadmeMdConfigFileOutputAdaptor")); assert!(plugins.is_enabled("ClaudeCodeCLIOutputAdaptor")); @@ -1121,19 +665,10 @@ mod tests { assert!(snapshot.aindex_dir.unwrap().contains("aindex")); } - /// 回归测试:clean 必须始终收集所有插件的 cleanup targets,无论插件是否启用。 - /// - /// 设计原因:用户可能在禁用某个插件之前已经运行过 install,导致该插件生成的文件 - /// 仍然残留在项目或全局目录中。如果 clean 也跟随插件开关,则这些残留文件将永远 - /// 无法被自动清理,造成"清爽的编程上下文环境"被破坏。 - /// - /// 因此,install 行为受插件开关控制(只生成启用插件的文件),而 clean 行为 - /// 不受插件开关控制(总是清理所有已知插件的输出文件)。 #[test] fn clean_snapshot_includes_disabled_plugin_cleanup_targets() { let workspace_dir = "/tmp/test-workspace"; let output_map: HashMap> = HashMap::new(); - // 模拟 agents_md 被禁用:没有 outputs,但有 cleanup targets let mut cleanup_map: HashMap = 
HashMap::new(); cleanup_map.insert( "AgentsOutputAdaptor".to_string(), @@ -1173,7 +708,6 @@ mod tests { ); } - /// 回归测试:build_cleanup_snapshot 应同时包含 output_map 和 cleanup_map 中的插件。 #[test] fn clean_snapshot_collects_from_both_maps() { let workspace_dir = "/tmp/test-workspace"; @@ -1336,13 +870,14 @@ mod tests { fn clean_filter_snapshot_by_scope_filters_project_roots() { let temp_dir = TempDir::new().unwrap(); let ws = temp_dir.path(); - // 创建真实的项目目录,discover_project_roots 需要读取实际文件系统 std::fs::create_dir_all(ws.join("project-a")).unwrap(); std::fs::create_dir_all(ws.join("project-b")).unwrap(); let scope = ws.join("project-a"); - let snapshot = - build_cleanup_snapshot(&ws.to_string_lossy(), &HashMap::new(), &HashMap::new()).unwrap(); + let snapshot = build_cleanup_snapshot(&ws.to_string_lossy(), + &HashMap::new(), + &HashMap::new(), + ).unwrap(); let filtered = filter_snapshot_by_scope(snapshot, &scope, ws); diff --git a/sdk/src/services/command_diagnostics.rs b/sdk/src/services/command_diagnostics_service.rs similarity index 92% rename from sdk/src/services/command_diagnostics.rs rename to sdk/src/services/command_diagnostics_service.rs index 2a51c557..a9dcf328 100644 --- a/sdk/src/services/command_diagnostics.rs +++ b/sdk/src/services/command_diagnostics_service.rs @@ -1,8 +1,9 @@ -use std::path::{Path, PathBuf}; +use std::path::Path; use serde_json::{Value, json}; use crate::domain::config::MergedConfigResult; +use crate::services::common::strip_unc_prefix; pub(crate) fn build_workspace_mismatch_warning( cwd: &Path, @@ -42,15 +43,6 @@ fn normalize_display_path(path: &Path) -> String { strip_unc_prefix(path).to_string_lossy().into_owned() } -fn strip_unc_prefix(path: &Path) -> PathBuf { - let value = path.to_string_lossy(); - if let Some(stripped) = value.strip_prefix(r"\\?\") { - PathBuf::from(stripped) - } else { - path.to_path_buf() - } -} - #[cfg(test)] mod tests { use std::path::PathBuf; diff --git a/sdk/src/services/common.rs b/sdk/src/services/common.rs new 
file mode 100644 index 00000000..f9e23bc7 --- /dev/null +++ b/sdk/src/services/common.rs @@ -0,0 +1,521 @@ +use std::path::{Path, PathBuf}; + +use serde::de::DeserializeOwned; +use serde_json::{Value, json}; + +use crate::context::OutputContext; +use crate::domain::config::{self, ConfigLoader, PluginsConfig, UserConfigFile}; +use crate::infra::logger::Logger; +use crate::CliError; + +// --------------------------------------------------------------------------- +// Plugin defaults +// --------------------------------------------------------------------------- + +#[derive(Debug, Clone, Copy)] +pub enum DefaultPluginKind { + Install, + DryRun, + Clean, +} + +#[derive(Debug, Clone, Copy, Default)] +pub struct EnabledPlugins { + pub agents_md: bool, + pub claude_code: bool, + pub codex: bool, + pub cursor: bool, + pub droid: bool, + pub gemini: bool, + pub git: bool, + pub jetbrains: bool, + pub jetbrains_code_style: bool, + pub kiro: bool, + pub opencode: bool, + pub qoder: bool, + pub readme: bool, + pub trae: bool, + pub trae_cn: bool, + pub vscode: bool, + pub warp: bool, + pub windsurf: bool, + pub zed: bool, +} + +impl EnabledPlugins { + pub fn from_config(config: Option<&PluginsConfig>, kind: DefaultPluginKind) -> Self { + let (claude_default, opencode_default) = match kind { + DefaultPluginKind::DryRun => (false, false), + _ => (true, true), + }; + + Self { + agents_md: config.and_then(|v| v.agents_md).unwrap_or(true), + claude_code: config.and_then(|v| v.claude_code).unwrap_or(claude_default), + codex: config.and_then(|v| v.codex).unwrap_or(false), + cursor: config.and_then(|v| v.cursor).unwrap_or(false), + droid: config.and_then(|v| v.droid).unwrap_or(false), + gemini: config.and_then(|v| v.gemini).unwrap_or(false), + git: config.and_then(|v| v.git).unwrap_or(true), + jetbrains: config.and_then(|v| v.jetbrains).unwrap_or(false), + jetbrains_code_style: config.and_then(|v| v.jetbrains_code_style).unwrap_or(false), + kiro: config.and_then(|v| 
v.kiro).unwrap_or(false), + opencode: config.and_then(|v| v.opencode).unwrap_or(opencode_default), + qoder: config.and_then(|v| v.qoder).unwrap_or(false), + readme: config.and_then(|v| v.readme).unwrap_or(true), + trae: config.and_then(|v| v.trae).unwrap_or(false), + trae_cn: config.and_then(|v| v.trae_cn).unwrap_or(false), + vscode: config.and_then(|v| v.vscode).unwrap_or(false), + warp: config.and_then(|v| v.warp).unwrap_or(false), + windsurf: config.and_then(|v| v.windsurf).unwrap_or(false), + zed: config.and_then(|v| v.zed).unwrap_or(false), + } + } + + pub fn is_enabled(self, plugin_name: &str) -> bool { + match plugin_name { + "AgentsOutputAdaptor" => self.agents_md, + "GitExcludeOutputAdaptor" => self.git, + "JetBrainsIDECodeStyleConfigOutputAdaptor" => self.jetbrains_code_style, + "VisualStudioCodeIDEConfigOutputAdaptor" => self.vscode, + "ZedIDEConfigOutputAdaptor" => self.zed, + "ReadmeMdConfigFileOutputAdaptor" => self.readme, + "ClaudeCodeCLIOutputAdaptor" => self.claude_code, + "CodexCLIOutputAdaptor" => self.codex, + "CursorOutputAdaptor" => self.cursor, + "DroidCLIOutputAdaptor" => self.droid, + "GeminiCLIOutputAdaptor" => self.gemini, + "JetBrainsAIAssistantCodexOutputAdaptor" => self.jetbrains, + "KiroCLIOutputAdaptor" => self.kiro, + "OpencodeCLIOutputAdaptor" => self.opencode, + "QoderIDEPluginOutputAdaptor" => self.qoder, + "TraeOutputAdaptor" => self.trae || self.trae_cn, + "WarpIDEOutputAdaptor" => self.warp, + "WindsurfOutputAdaptor" => self.windsurf, + _ => false, + } + } + + pub fn registered_plugins(self) -> Vec { + let mut plugins = Vec::new(); + for plugin_name in [ + "AgentsOutputAdaptor", + "GitExcludeOutputAdaptor", + "JetBrainsIDECodeStyleConfigOutputAdaptor", + "VisualStudioCodeIDEConfigOutputAdaptor", + "ZedIDEConfigOutputAdaptor", + "ReadmeMdConfigFileOutputAdaptor", + "ClaudeCodeCLIOutputAdaptor", + "CodexCLIOutputAdaptor", + "CursorOutputAdaptor", + "DroidCLIOutputAdaptor", + "GeminiCLIOutputAdaptor", + 
"JetBrainsAIAssistantCodexOutputAdaptor", + "KiroCLIOutputAdaptor", + "OpencodeCLIOutputAdaptor", + "QoderIDEPluginOutputAdaptor", + "TraeOutputAdaptor", + "WarpIDEOutputAdaptor", + "WindsurfOutputAdaptor", + ] { + if self.is_enabled(plugin_name) { + plugins.push(plugin_name.to_string()); + } + } + plugins + } +} + +// --------------------------------------------------------------------------- +// Envelopes for JSON deserialization +// --------------------------------------------------------------------------- + +#[derive(Debug, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct WorkspaceEnvelope { + pub workspace: crate::domain::plugin_shared::Workspace, +} + +#[derive(Debug, Default, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct GlobalMemoryEnvelope { + #[serde(default)] + pub global_memory: Option, +} + +#[derive(Debug, Default, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CommandsEnvelope { + #[serde(default)] + pub commands: Vec, +} + +#[derive(Debug, Default, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SubAgentsEnvelope { + #[serde(default)] + pub sub_agents: Vec, +} + +#[derive(Debug, Default, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SkillsEnvelope { + #[serde(default)] + pub skills: Vec, +} + +#[derive(Debug, Default, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct RulesEnvelope { + #[serde(default)] + pub rules: Vec, +} + +#[derive(Debug, Default, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ReadmeEnvelope { + #[serde(default)] + pub readme_prompts: Vec, +} + +#[derive(Debug, Default, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct GitIgnoreEnvelope { + #[serde(default)] + pub global_git_ignore: Option, +} + +#[derive(Debug, Default, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct GitExcludeEnvelope { + #[serde(default)] + pub shadow_git_exclude: Option, 
+} + +#[derive(Debug, Default, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SharedIgnoreEnvelope { + #[serde(default)] + pub ai_agent_ignore_config_files: + Option>, +} + +#[derive(Debug, Default, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct VSCodeEnvelope { + #[serde(default)] + pub vscode_config_files: Option>, +} + +#[derive(Debug, Default, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ZedEnvelope { + #[serde(default)] + pub zed_config_files: Option>, +} + +#[derive(Debug, Default, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct JetBrainsEnvelope { + #[serde(default)] + pub jetbrains_config_files: Option>, +} + +#[derive(Debug, Default, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct EditorConfigEnvelope { + #[serde(default)] + pub editor_config_files: Option>, +} + +// --------------------------------------------------------------------------- +// Config helpers +// --------------------------------------------------------------------------- + +pub fn resolve_cwd(cwd: Option<&str>) -> Result { + match cwd { + Some(value) => Ok(config::resolve_workspace_dir(value)), + None => std::env::current_dir().map_err(CliError::IoError), + } +} + +pub fn load_config( + cwd: &Path, + load_user_config: Option, +) -> Result { + if load_user_config == Some(false) { + return Ok(config::MergedConfigResult { + config: UserConfigFile::default(), + sources: vec![], + found: false, + }); + } + + let result = ConfigLoader::with_defaults() + .try_load(cwd) + .map_err(CliError::ConfigError)?; + + if !result.found { + let config_path = config::get_required_global_config_path() + .unwrap_or_else(|_| config::get_global_config_path()); + return Err(CliError::ConfigError(format!( + "Required config file not found at {}. 
Please create it before running tnmsc.", + config_path.display() + ))); + } + + Ok(result) +} + +pub fn resolve_workspace_dir(_cwd: &Path, config: &UserConfigFile) -> Result { + match config.workspace_dir.as_deref() { + Some(dir) => Ok(config::resolve_workspace_dir(dir)), + None => Err(CliError::ConfigError( + "workspaceDir is required but was not configured. Please set workspaceDir in your .tnmsc.json config file.".to_string(), + )), + } +} + +pub fn build_global_scope(config: &UserConfigFile) -> Option { + let mut scope = serde_json::Map::new(); + + let mut os = serde_json::Map::new(); + os.insert("platform".to_string(), json!(std::env::consts::OS)); + os.insert("arch".to_string(), json!(std::env::consts::ARCH)); + os.insert("name".to_string(), json!(std::env::consts::OS)); + scope.insert("os".to_string(), Value::Object(os)); + + if let Some(profile) = config.profile.as_ref() { + let mut value = serde_json::Map::new(); + if let Some(name) = profile.name.as_ref() { + value.insert("name".to_string(), json!(name)); + } + if let Some(username) = profile.username.as_ref() { + value.insert("username".to_string(), json!(username)); + } + if let Some(gender) = profile.gender.as_ref() { + value.insert("gender".to_string(), json!(gender)); + } + if let Some(birthday) = profile.birthday.as_ref() { + value.insert("birthday".to_string(), json!(birthday)); + } + for (key, extra) in &profile.extra { + value.insert(key.clone(), extra.clone()); + } + if !value.is_empty() { + scope.insert("profile".to_string(), Value::Object(value)); + } + } + + if let Some(code_styles) = config.code_styles.as_ref() { + let mut value = serde_json::Map::new(); + if let Some(indent) = code_styles.indent { + value.insert( + "indent".to_string(), + json!(match indent { + config::CodeStyleIndent::Tab => "tab", + config::CodeStyleIndent::Space => "space", + }), + ); + } + if let Some(tab_size) = code_styles.tab_size { + value.insert("tabSize".to_string(), json!(tab_size)); + } + for (key, extra) in 
&code_styles.extra { + value.insert(key.clone(), extra.clone()); + } + if !value.is_empty() { + scope.insert("codeStyles".to_string(), Value::Object(value)); + } + } + + let mut tool = serde_json::Map::new(); + tool.insert("name".to_string(), json!("tnmsc")); + tool.insert("version".to_string(), json!(crate::version())); + scope.insert("tool".to_string(), Value::Object(tool)); + + (!scope.is_empty()).then(|| Value::Object(scope)) +} + +pub fn strip_unc_prefix(path: &Path) -> PathBuf { + let s = path.to_string_lossy(); + if let Some(stripped) = s.strip_prefix(r"\\?\") { + PathBuf::from(stripped) + } else { + path.to_path_buf() + } +} + +pub fn count_missing_directories(dir: &Path) -> usize { + let mut missing = Vec::new(); + let mut current = Some(dir); + + while let Some(path) = current { + if path.exists() { + break; + } + missing.push(path.to_path_buf()); + current = path.parent(); + } + + missing.len() +} + +// --------------------------------------------------------------------------- +// JSON collection helpers +// --------------------------------------------------------------------------- + +pub fn collect_json( + collector: impl Fn(&str) -> Result, + input: Value, +) -> Result +where + T: DeserializeOwned, +{ + let raw = collector(&input.to_string())?; + serde_json::from_str(&raw).map_err(CliError::SerializationError) +} + +// --------------------------------------------------------------------------- +// Context collection +// --------------------------------------------------------------------------- + +pub fn collect_context( + workspace_dir: &str, + global_scope: Option<&Value>, + enabled_plugins: &EnabledPlugins, + logger: &Logger, +) -> Result { + + let aindex = { + let _span = logger.span("collect.aindex_resolvers").enter(); + collect_json::( + crate::repositories::aindex_resolvers::collect_aindex_resolvers, + json!({ "workspaceDir": workspace_dir }), + )? 
+ }; + + let project_prompts = { + let _span = logger.span("collect.project_prompt").enter(); + collect_json::( + crate::repositories::project_prompt::collect_project_prompt, + json!({ + "workspaceDir": workspace_dir, + "workspace": aindex.workspace, + "globalScope": global_scope, + }), + )? + }; + + let global_memory = { + let _span = logger.span("collect.global_memory").enter(); + collect_json::( + crate::repositories::global_memory::collect_global_memory, + json!({ "workspaceDir": workspace_dir, "globalScope": global_scope }), + )? + }; + + let commands = { + let _span = logger.span("collect.command").enter(); + collect_json::( + crate::repositories::command::collect_command, + json!({ "workspaceDir": workspace_dir, "globalScope": global_scope }), + )? + }; + let sub_agents = { + let _span = logger.span("collect.subagent").enter(); + collect_json::( + crate::repositories::subagent::collect_subagent, + json!({ "workspaceDir": workspace_dir, "globalScope": global_scope }), + )? + }; + let skills = { + let _span = logger.span("collect.skill").enter(); + collect_json::( + crate::repositories::skill::collect_skill, + json!({ "workspaceDir": workspace_dir, "globalScope": global_scope }), + )? + }; + let rules = { + let _span = logger.span("collect.rule").enter(); + collect_json::( + crate::repositories::rule::collect_rule, + json!({ "workspaceDir": workspace_dir, "globalScope": global_scope }), + )? + }; + let readme = { + let _span = logger.span("collect.readme").enter(); + collect_json::( + crate::repositories::readme::collect_readme, + json!({ "workspaceDir": workspace_dir, "globalScope": global_scope }), + )? + }; + let gitignore = { + let _span = logger.span("collect.gitignore").enter(); + collect_json::( + crate::repositories::gitignore::collect_gitignore, + json!({ "workspaceDir": workspace_dir }), + )? 
+ }; + let git_exclude = { + let _span = logger.span("collect.git_exclude").enter(); + collect_json::( + crate::repositories::git_exclude::collect_git_exclude, + json!({ "workspaceDir": workspace_dir }), + )? + }; + let shared_ignore = { + let _span = logger.span("collect.shared_ignore").enter(); + collect_json::( + crate::repositories::shared_ignore::collect_shared_ignore, + json!({ "workspaceDir": workspace_dir }), + )? + }; + let vscode = { + let _span = logger.span("collect.vscode_config").enter(); + collect_json::( + crate::repositories::vscode_config::collect_vscode_config, + json!({ "workspaceDir": workspace_dir }), + )? + }; + let zed = { + let _span = logger.span("collect.zed_config").enter(); + collect_json::( + crate::repositories::zed_config::collect_zed_config, + json!({ "workspaceDir": workspace_dir }), + )? + }; + let jetbrains = { + let _span = logger.span("collect.jetbrains_config").enter(); + collect_json::( + crate::repositories::jetbrains_config::collect_jetbrains_config, + json!({ "workspaceDir": workspace_dir }), + )? + }; + let editor_config = { + let _span = logger.span("collect.editorconfig").enter(); + collect_json::( + crate::repositories::editorconfig::collect_editorconfig, + json!({ "workspaceDir": workspace_dir }), + )? 
+ }; + + Ok(OutputContext { + workspace: Some(project_prompts.workspace), + vscode_config_files: vscode.vscode_config_files, + zed_config_files: zed.zed_config_files, + jetbrains_config_files: jetbrains.jetbrains_config_files, + editor_config_files: editor_config.editor_config_files, + fast_commands: (!commands.commands.is_empty()).then_some(commands.commands), + sub_agents: (!sub_agents.sub_agents.is_empty()).then_some(sub_agents.sub_agents), + skills: (!skills.skills.is_empty()).then_some(skills.skills), + rules: (!rules.rules.is_empty()).then_some(rules.rules), + global_memory: global_memory.global_memory, + global_git_ignore: gitignore.global_git_ignore, + shadow_git_exclude: git_exclude.shadow_git_exclude, + shadow_source_project_dir: None, + readme_prompts: (!readme.readme_prompts.is_empty()).then_some(readme.readme_prompts), + ai_agent_ignore_config_files: shared_ignore.ai_agent_ignore_config_files, + registered_output_plugins: Some(enabled_plugins.registered_plugins()), + }) +} diff --git a/sdk/src/services/dry_run_service.rs b/sdk/src/services/dry_run_service.rs index 5faf1436..6ac0777b 100644 --- a/sdk/src/services/dry_run_service.rs +++ b/sdk/src/services/dry_run_service.rs @@ -1,28 +1,70 @@ use std::collections::BTreeMap; -use std::path::{Path, PathBuf}; +use std::path::Path; -use serde_json::{Value, json}; +use serde_json::json; -use crate::context::OutputContext; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPlansDto}; -use crate::domain::config::{self, ConfigLoader, PluginsConfig, UserConfigFile}; use crate::domain::output_plans::droid_output_plan::DroidOutputPlanDto; -use crate::services::command_diagnostics::build_workspace_mismatch_warning; +use crate::infra::logger::create_logger; +use crate::services::command_diagnostics_service::build_workspace_mismatch_warning; +use crate::services::common::{ + DefaultPluginKind, EnabledPlugins, collect_context, load_config, resolve_cwd, + resolve_workspace_dir, +}; use 
crate::{CliError, MemorySyncCommandOptions, MemorySyncCommandResult}; +#[derive(Debug, Clone)] +#[allow(dead_code)] +struct PlannedOutputFile { + path: String, + content: String, + encoding: Option, +} + pub fn dry_run(options: MemorySyncCommandOptions) -> Result { + let logger = create_logger("dry_run", options.log_level.as_deref().and_then(|s| crate::infra::logger::LogLevel::from_str_loose(s))); + let _span = logger.span("command.dry_run").enter(); + + logger.info("Dry run started", None); + let cwd = resolve_cwd(options.cwd.as_deref())?; + + let config_span = logger.span("config.load").enter(); let config_result = load_config(&cwd, options.load_user_config)?; + config_span.exit(); + let workspace_dir = resolve_workspace_dir(&cwd, &config_result.config)?; let warnings = build_workspace_mismatch_warning(&cwd, &workspace_dir, &config_result) .into_iter() .collect(); let workspace_dir_str = workspace_dir.to_string_lossy().into_owned(); - let global_scope = build_global_scope(&config_result.config); - let enabled_plugins = EnabledPlugins::from_config(config_result.config.plugins.as_ref()); - let context = collect_context(&workspace_dir_str, global_scope.as_ref())?; + logger.info("Config loaded", Some(json!({ + "workspaceDir": &workspace_dir_str, + "configFound": config_result.found, + }))); + + let global_scope = crate::services::common::build_global_scope(&config_result.config); + let enabled_plugins = EnabledPlugins::from_config(config_result.config.plugins.as_ref(), DefaultPluginKind::DryRun); + + logger.info("Plugins resolved", Some(json!({ + "enabled": enabled_plugins.registered_plugins(), + }))); + + let context_span = logger.span("context.collect").enter(); + let context = collect_context(&workspace_dir_str, global_scope.as_ref(), &enabled_plugins, &logger)?; + context_span.exit(); + + logger.info("Context collected", Some(json!({ + "globalMemory": context.global_memory.is_some(), + "commands": context.fast_commands.as_ref().map(|v| v.len()), + "skills": 
context.skills.as_ref().map(|v| v.len()), + "rules": context.rules.as_ref().map(|v| v.len()), + }))); + + let output_span = logger.span("output.build").enter(); let planned_outputs = build_output_files(&context, enabled_plugins)?; + output_span.exit(); let mut files_affected = 0usize; let mut dirs_affected = 0usize; @@ -31,12 +73,17 @@ pub fn dry_run(options: MemorySyncCommandOptions) -> Result Result) -> Result { - match cwd { - Some(value) => Ok(config::resolve_workspace_dir(value)), - None => std::env::current_dir().map_err(CliError::IoError), - } -} - -fn load_config( - cwd: &Path, - load_user_config: Option, -) -> Result { - if load_user_config == Some(false) { - return Ok(config::MergedConfigResult { - config: UserConfigFile::default(), - sources: vec![], - found: false, - }); - } - - let result = ConfigLoader::with_defaults() - .try_load(cwd) - .map_err(CliError::ConfigError)?; - - if !result.found { - let config_path = config::get_required_global_config_path() - .unwrap_or_else(|_| config::get_global_config_path()); - return Err(CliError::ConfigError(format!( - "Required config file not found at {}. Please create it before running tnmsc.", - config_path.display() - ))); - } - - Ok(result) -} - -fn resolve_workspace_dir(_cwd: &Path, config: &UserConfigFile) -> Result { - match config.workspace_dir.as_deref() { - Some(dir) => Ok(config::resolve_workspace_dir(dir)), - None => Err(CliError::ConfigError( - "workspaceDir is required but was not configured. 
Please set workspaceDir in your .tnmsc.json config file.".to_string(), - )), - } -} - -fn build_global_scope(config: &UserConfigFile) -> Option { - let mut scope = serde_json::Map::new(); - - let mut os = serde_json::Map::new(); - os.insert("platform".to_string(), json!(std::env::consts::OS)); - os.insert("arch".to_string(), json!(std::env::consts::ARCH)); - os.insert("name".to_string(), json!(std::env::consts::OS)); - scope.insert("os".to_string(), Value::Object(os)); - - if let Some(profile) = config.profile.as_ref() { - let mut value = serde_json::Map::new(); - if let Some(name) = profile.name.as_ref() { - value.insert("name".to_string(), json!(name)); - } - if let Some(username) = profile.username.as_ref() { - value.insert("username".to_string(), json!(username)); - } - if let Some(gender) = profile.gender.as_ref() { - value.insert("gender".to_string(), json!(gender)); - } - if let Some(birthday) = profile.birthday.as_ref() { - value.insert("birthday".to_string(), json!(birthday)); - } - for (key, extra) in &profile.extra { - value.insert(key.clone(), extra.clone()); - } - if !value.is_empty() { - scope.insert("profile".to_string(), Value::Object(value)); - } - } - - if let Some(code_styles) = config.code_styles.as_ref() { - let mut value = serde_json::Map::new(); - if let Some(indent) = code_styles.indent { - value.insert( - "indent".to_string(), - json!(match indent { - config::CodeStyleIndent::Tab => "tab", - config::CodeStyleIndent::Space => "space", - }), - ); - } - if let Some(tab_size) = code_styles.tab_size { - value.insert("tabSize".to_string(), json!(tab_size)); - } - for (key, extra) in &code_styles.extra { - value.insert(key.clone(), extra.clone()); - } - if !value.is_empty() { - scope.insert("codeStyles".to_string(), Value::Object(value)); - } - } - - let mut tool = serde_json::Map::new(); - tool.insert("name".to_string(), json!("tnmsc")); - tool.insert("version".to_string(), json!(crate::version())); - scope.insert("tool".to_string(), 
Value::Object(tool)); - - (!scope.is_empty()).then(|| Value::Object(scope)) -} - -#[derive(Debug, Clone, Copy, Default)] -struct EnabledPlugins { - agents_md: bool, - claude_code: bool, - codex: bool, - cursor: bool, - droid: bool, - gemini: bool, - git: bool, - jetbrains: bool, - jetbrains_code_style: bool, - kiro: bool, - opencode: bool, - qoder: bool, - readme: bool, - trae: bool, - trae_cn: bool, - vscode: bool, - warp: bool, - windsurf: bool, - zed: bool, -} - -impl EnabledPlugins { - fn from_config(config: Option<&PluginsConfig>) -> Self { - Self { - agents_md: config.and_then(|value| value.agents_md).unwrap_or(true), - claude_code: config.and_then(|value| value.claude_code).unwrap_or(false), - codex: config.and_then(|value| value.codex).unwrap_or(false), - cursor: config.and_then(|value| value.cursor).unwrap_or(false), - droid: config.and_then(|value| value.droid).unwrap_or(false), - gemini: config.and_then(|value| value.gemini).unwrap_or(false), - git: config.and_then(|value| value.git).unwrap_or(true), - jetbrains: config.and_then(|value| value.jetbrains).unwrap_or(false), - jetbrains_code_style: config - .and_then(|value| value.jetbrains_code_style) - .unwrap_or(false), - kiro: config.and_then(|value| value.kiro).unwrap_or(false), - opencode: config.and_then(|value| value.opencode).unwrap_or(false), - qoder: config.and_then(|value| value.qoder).unwrap_or(false), - readme: config.and_then(|value| value.readme).unwrap_or(true), - trae: config.and_then(|value| value.trae).unwrap_or(false), - trae_cn: config.and_then(|value| value.trae_cn).unwrap_or(false), - vscode: config.and_then(|value| value.vscode).unwrap_or(false), - warp: config.and_then(|value| value.warp).unwrap_or(false), - windsurf: config.and_then(|value| value.windsurf).unwrap_or(false), - zed: config.and_then(|value| value.zed).unwrap_or(false), - } - } - - fn is_enabled(self, plugin_name: &str) -> bool { - match plugin_name { - "AgentsOutputAdaptor" => self.agents_md, - 
"GitExcludeOutputAdaptor" => self.git, - "JetBrainsIDECodeStyleConfigOutputAdaptor" => self.jetbrains_code_style, - "VisualStudioCodeIDEConfigOutputAdaptor" => self.vscode, - "ZedIDEConfigOutputAdaptor" => self.zed, - "ReadmeMdConfigFileOutputAdaptor" => self.readme, - "ClaudeCodeCLIOutputAdaptor" => self.claude_code, - "CodexCLIOutputAdaptor" => self.codex, - "CursorOutputAdaptor" => self.cursor, - "DroidCLIOutputAdaptor" => self.droid, - "GeminiCLIOutputAdaptor" => self.gemini, - "JetBrainsAIAssistantCodexOutputAdaptor" => self.jetbrains, - "KiroCLIOutputAdaptor" => self.kiro, - "OpencodeCLIOutputAdaptor" => self.opencode, - "QoderIDEPluginOutputAdaptor" => self.qoder, - "TraeOutputAdaptor" => self.trae || self.trae_cn, - "WarpIDEOutputAdaptor" => self.warp, - "WindsurfOutputAdaptor" => self.windsurf, - _ => false, - } - } -} - -#[derive(Debug, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -struct WorkspaceEnvelope { - workspace: crate::domain::plugin_shared::Workspace, -} - -#[derive(Debug, Default, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -struct GlobalMemoryEnvelope { - #[serde(default)] - global_memory: Option, -} - -#[derive(Debug, Default, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -struct CommandsEnvelope { - #[serde(default)] - commands: Vec, -} - -#[derive(Debug, Default, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -struct SubAgentsEnvelope { - #[serde(default)] - sub_agents: Vec, -} - -#[derive(Debug, Default, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -struct SkillsEnvelope { - #[serde(default)] - skills: Vec, -} - -#[derive(Debug, Default, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -struct RulesEnvelope { - #[serde(default)] - rules: Vec, -} - -#[derive(Debug, Default, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -struct ReadmeEnvelope { - #[serde(default)] - readme_prompts: Vec, -} - -#[derive(Debug, Default, serde::Deserialize)] -#[serde(rename_all = 
"camelCase")] -struct GitIgnoreEnvelope { - #[serde(default)] - global_git_ignore: Option, -} - -#[derive(Debug, Default, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -struct GitExcludeEnvelope { - #[serde(default)] - shadow_git_exclude: Option, -} - -#[derive(Debug, Default, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -struct SharedIgnoreEnvelope { - #[serde(default)] - ai_agent_ignore_config_files: Option>, -} - -#[derive(Debug, Default, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -struct VSCodeEnvelope { - #[serde(default)] - vscode_config_files: Option>, -} - -#[derive(Debug, Default, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -struct ZedEnvelope { - #[serde(default)] - zed_config_files: Option>, -} - -#[derive(Debug, Default, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -struct JetBrainsEnvelope { - #[serde(default)] - jetbrains_config_files: Option>, -} - -#[derive(Debug, Default, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -struct EditorConfigEnvelope { - #[serde(default)] - editor_config_files: Option>, -} - -#[derive(Debug, Clone)] -#[allow(dead_code)] -struct PlannedOutputFile { - path: String, - content: String, - encoding: Option, -} - -fn collect_context( - workspace_dir: &str, - _global_scope: Option<&Value>, -) -> Result { - fn collect_json( - collector: impl Fn(&str) -> Result, - input: Value, - ) -> Result - where - T: serde::de::DeserializeOwned, - { - let raw = collector(&input.to_string())?; - serde_json::from_str(&raw).map_err(CliError::SerializationError) - } - - let aindex = collect_json::( - crate::repositories::aindex_resolvers::collect_aindex_resolvers, - json!({ "workspaceDir": workspace_dir }), - )?; - - let project_prompts = collect_json::( - crate::repositories::project_prompt::collect_project_prompt, - json!({ - "workspaceDir": workspace_dir, - "workspace": aindex.workspace, - "globalScope": None::, - }), - )?; - - let global_memory = collect_json::( - 
crate::repositories::global_memory::collect_global_memory, - json!({ "workspaceDir": workspace_dir, "globalScope": None:: }), - )?; - - let commands = collect_json::( - crate::repositories::command::collect_command, - json!({ "workspaceDir": workspace_dir, "globalScope": None:: }), - )?; - let sub_agents = collect_json::( - crate::repositories::subagent::collect_subagent, - json!({ "workspaceDir": workspace_dir, "globalScope": None:: }), - )?; - let skills = collect_json::( - crate::repositories::skill::collect_skill, - json!({ "workspaceDir": workspace_dir, "globalScope": None:: }), - )?; - let rules = collect_json::( - crate::repositories::rule::collect_rule, - json!({ "workspaceDir": workspace_dir, "globalScope": None:: }), - )?; - let readme = collect_json::( - crate::repositories::readme::collect_readme, - json!({ "workspaceDir": workspace_dir, "globalScope": None:: }), - )?; - let gitignore = collect_json::( - crate::repositories::gitignore::collect_gitignore, - json!({ "workspaceDir": workspace_dir }), - )?; - let git_exclude = collect_json::( - crate::repositories::git_exclude::collect_git_exclude, - json!({ "workspaceDir": workspace_dir }), - )?; - let shared_ignore = collect_json::( - crate::repositories::shared_ignore::collect_shared_ignore, - json!({ "workspaceDir": workspace_dir }), - )?; - let vscode = collect_json::( - crate::repositories::vscode_config::collect_vscode_config, - json!({ "workspaceDir": workspace_dir }), - )?; - let zed = collect_json::( - crate::repositories::zed_config::collect_zed_config, - json!({ "workspaceDir": workspace_dir }), - )?; - let jetbrains = collect_json::( - crate::repositories::jetbrains_config::collect_jetbrains_config, - json!({ "workspaceDir": workspace_dir }), - )?; - let editor_config = collect_json::( - crate::repositories::editorconfig::collect_editorconfig, - json!({ "workspaceDir": workspace_dir }), - )?; - - Ok(OutputContext { - workspace: Some(project_prompts.workspace), - vscode_config_files: 
vscode.vscode_config_files, - zed_config_files: zed.zed_config_files, - jetbrains_config_files: jetbrains.jetbrains_config_files, - editor_config_files: editor_config.editor_config_files, - fast_commands: (!commands.commands.is_empty()).then_some(commands.commands), - sub_agents: (!sub_agents.sub_agents.is_empty()).then_some(sub_agents.sub_agents), - skills: (!skills.skills.is_empty()).then_some(skills.skills), - rules: (!rules.rules.is_empty()).then_some(rules.rules), - global_memory: global_memory.global_memory, - global_git_ignore: gitignore.global_git_ignore, - shadow_git_exclude: git_exclude.shadow_git_exclude, - shadow_source_project_dir: None, - readme_prompts: (!readme.readme_prompts.is_empty()).then_some(readme.readme_prompts), - ai_agent_ignore_config_files: shared_ignore.ai_agent_ignore_config_files, - registered_output_plugins: None, - }) -} - fn build_output_files( - context: &OutputContext, + context: &crate::context::OutputContext, enabled_plugins: EnabledPlugins, ) -> Result, CliError> { let mut outputs = BTreeMap::new(); @@ -457,83 +116,52 @@ fn build_output_files( push_base_plans(&mut outputs, &base_plans, enabled_plugins); if enabled_plugins.claude_code { - if let Ok(plan) = - crate::domain::output_plans::claude_code_output_plan::build_claude_code_output_plan(context) - { - push_base_output_files(&mut outputs, &plan.output_files); - } + let plan = crate::domain::output_plans::claude_code_output_plan::build_claude_code_output_plan(context)?; + push_base_output_files(&mut outputs, &plan.output_files); } if enabled_plugins.codex { - if let Ok(plan) = - crate::domain::output_plans::codex_output_plan::build_codex_output_plan(context) - { - push_base_output_files(&mut outputs, &plan.output_files); - } + let plan = crate::domain::output_plans::codex_output_plan::build_codex_output_plan(context)?; + push_base_output_files(&mut outputs, &plan.output_files); } if enabled_plugins.cursor { - if let Ok(plan) = - 
crate::domain::output_plans::cursor_output_plan::build_cursor_output_plan(context) - { - push_base_output_files(&mut outputs, &plan.output_files); - } + let plan = crate::domain::output_plans::cursor_output_plan::build_cursor_output_plan(context)?; + push_base_output_files(&mut outputs, &plan.output_files); } if enabled_plugins.droid { - if let Ok(plan) = - crate::domain::output_plans::droid_output_plan::build_droid_output_plan(context) - { - push_droid_output_files(&mut outputs, &plan); - } + let plan = crate::domain::output_plans::droid_output_plan::build_droid_output_plan(context)?; + push_droid_output_files(&mut outputs, &plan); } if enabled_plugins.gemini { - if let Ok(plan) = - crate::domain::output_plans::gemini_output_plan::build_gemini_output_plan(context) - { - push_base_output_files(&mut outputs, &plan.output_files); - } + let plan = crate::domain::output_plans::gemini_output_plan::build_gemini_output_plan(context)?; + push_base_output_files(&mut outputs, &plan.output_files); } if enabled_plugins.jetbrains { - if let Ok(plan) = crate::domain::output_plans::jetbrains_ai_assistant_codex_output_plan::build_jetbrains_ai_assistant_codex_output_plan(context) { - push_base_output_files(&mut outputs, &plan.output_files); - } + let plan = crate::domain::output_plans::jetbrains_ai_assistant_codex_output_plan::build_jetbrains_ai_assistant_codex_output_plan(context)?; + push_base_output_files(&mut outputs, &plan.output_files); } if enabled_plugins.kiro { - if let Ok(plan) = crate::domain::output_plans::kiro_output_plan::build_kiro_output_plan(context) - { - push_base_output_files(&mut outputs, &plan.output_files); - } + let plan = crate::domain::output_plans::kiro_output_plan::build_kiro_output_plan(context)?; + push_base_output_files(&mut outputs, &plan.output_files); } if enabled_plugins.opencode { - if let Ok(plan) = - crate::domain::output_plans::opencode_output_plan::build_opencode_output_plan(context) - { - push_base_output_files(&mut outputs, 
&plan.output_files); - } + let plan = crate::domain::output_plans::opencode_output_plan::build_opencode_output_plan(context)?; + push_base_output_files(&mut outputs, &plan.output_files); } if enabled_plugins.qoder { - if let Ok(plan) = - crate::domain::output_plans::qoder_output_plan::build_qoder_output_plan(context) - { - push_base_output_files(&mut outputs, &plan.output_files); - } + let plan = crate::domain::output_plans::qoder_output_plan::build_qoder_output_plan(context)?; + push_base_output_files(&mut outputs, &plan.output_files); } if enabled_plugins.trae || enabled_plugins.trae_cn { - if let Ok(plan) = crate::domain::output_plans::trae_output_plan::build_trae_output_plan(context) - { - push_base_output_files(&mut outputs, &plan.output_files); - } + let plan = crate::domain::output_plans::trae_output_plan::build_trae_output_plan(context)?; + push_base_output_files(&mut outputs, &plan.output_files); } if enabled_plugins.warp { - if let Ok(plan) = crate::domain::output_plans::warp_output_plan::build_warp_output_plan(context) - { - push_base_output_files(&mut outputs, &plan.output_files); - } + let plan = crate::domain::output_plans::warp_output_plan::build_warp_output_plan(context)?; + push_base_output_files(&mut outputs, &plan.output_files); } if enabled_plugins.windsurf { - if let Ok(plan) = - crate::domain::output_plans::windsurf_output_plan::build_windsurf_output_plan(context) - { - push_base_output_files(&mut outputs, &plan.output_files); - } + let plan = crate::domain::output_plans::windsurf_output_plan::build_windsurf_output_plan(context)?; + push_base_output_files(&mut outputs, &plan.output_files); } Ok(outputs) @@ -561,7 +189,7 @@ fn push_base_output_files( PlannedOutputFile { path: file.path.clone(), content: file.content.clone(), - encoding: None, + encoding: file.encoding.clone(), }, ); } @@ -583,27 +211,12 @@ fn push_droid_output_files( } } -fn count_missing_directories(dir: &Path) -> usize { - let mut missing = Vec::new(); - let mut current = 
Some(dir); - - while let Some(path) = current { - if path.exists() { - break; - } - missing.push(path.to_path_buf()); - current = path.parent(); - } - - missing.len() -} - #[cfg(test)] mod tests { use super::*; use tempfile::TempDir; - fn with_home_dir(home_dir: &Path, callback: impl FnOnce() -> T) -> T { + fn with_home_dir(home_dir: &std::path::Path, callback: impl FnOnce() -> T) -> T { let _guard = match crate::domain::TEST_ENV_LOCK.lock() { Ok(g) => g, Err(error) => error.into_inner(), @@ -628,7 +241,7 @@ mod tests { result } - fn create_test_config(home_dir: &Path, workspace_dir: &Path) -> std::io::Result<()> { + fn create_test_config(home_dir: &std::path::Path, workspace_dir: &std::path::Path) -> std::io::Result<()> { let config_content = json!({ "workspaceDir": workspace_dir.to_string_lossy() }); @@ -725,14 +338,14 @@ mod tests { fn dry_run_count_missing_directories_works() { let temp_dir = TempDir::new().unwrap(); let nested = temp_dir.path().join("a").join("b").join("c"); - let count = count_missing_directories(&nested); + let count = crate::services::common::count_missing_directories(&nested); assert_eq!(count, 3); } #[test] fn dry_run_count_missing_directories_returns_zero_for_existing() { let temp_dir = TempDir::new().unwrap(); - let count = count_missing_directories(temp_dir.path()); + let count = crate::services::common::count_missing_directories(temp_dir.path()); assert_eq!(count, 0); } } diff --git a/sdk/src/services/install_service.rs b/sdk/src/services/install_service.rs index 4532777b..7b951233 100644 --- a/sdk/src/services/install_service.rs +++ b/sdk/src/services/install_service.rs @@ -1,141 +1,22 @@ use std::collections::BTreeMap; use std::fs; -use std::path::{Path, PathBuf}; +use std::path::Path; use base64::Engine; -use serde::Deserialize; -use serde::de::DeserializeOwned; use serde_json::{Value, json}; -use crate::context::OutputContext; use crate::domain::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPlansDto}; -use 
crate::domain::config::{self, ConfigLoader, PluginsConfig, UserConfigFile}; use crate::domain::output_plans::droid_output_plan::DroidOutputPlanDto; -use crate::domain::plugin_shared::{ - AIAgentIgnoreConfigFile, FastCommandPrompt, GlobalMemoryPrompt, ProjectIDEConfigFile, - ReadmePrompt, RulePrompt, SkillPrompt, SubAgentPrompt, Workspace, -}; use crate::infra::desk_paths; +use crate::infra::logger::{Logger, create_logger}; use crate::policy::path_blocking; -use crate::services::command_diagnostics::build_workspace_mismatch_warning; +use crate::services::command_diagnostics_service::build_workspace_mismatch_warning; +use crate::services::common::{ + DefaultPluginKind, EnabledPlugins, collect_context, load_config, resolve_cwd, + resolve_workspace_dir, +}; use crate::{CliError, MemorySyncCommandOptions, MemorySyncCommandResult}; -const PLUGIN_AGENTS: &str = "AgentsOutputAdaptor"; -const PLUGIN_GIT: &str = "GitExcludeOutputAdaptor"; -const PLUGIN_JETBRAINS_CODE_STYLE: &str = "JetBrainsIDECodeStyleConfigOutputAdaptor"; -const PLUGIN_VSCODE: &str = "VisualStudioCodeIDEConfigOutputAdaptor"; -const PLUGIN_ZED: &str = "ZedIDEConfigOutputAdaptor"; -const PLUGIN_README: &str = "ReadmeMdConfigFileOutputAdaptor"; -const PLUGIN_CLAUDE: &str = "ClaudeCodeCLIOutputAdaptor"; -const PLUGIN_CODEX: &str = "CodexCLIOutputAdaptor"; -const PLUGIN_CURSOR: &str = "CursorOutputAdaptor"; -const PLUGIN_DROID: &str = "DroidCLIOutputAdaptor"; -const PLUGIN_GEMINI: &str = "GeminiCLIOutputAdaptor"; -const PLUGIN_JETBRAINS: &str = "JetBrainsAIAssistantCodexOutputAdaptor"; -const PLUGIN_KIRO: &str = "KiroCLIOutputAdaptor"; -const PLUGIN_OPENCODE: &str = "OpencodeCLIOutputAdaptor"; -const PLUGIN_QODER: &str = "QoderIDEPluginOutputAdaptor"; -const PLUGIN_TRAE: &str = "TraeOutputAdaptor"; -const PLUGIN_WARP: &str = "WarpIDEOutputAdaptor"; -const PLUGIN_WINDSURF: &str = "WindsurfOutputAdaptor"; - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -struct WorkspaceEnvelope { - workspace: 
Workspace, -} - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "camelCase")] -struct GlobalMemoryEnvelope { - #[serde(default)] - global_memory: Option, -} - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "camelCase")] -struct CommandsEnvelope { - #[serde(default)] - commands: Vec, -} - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "camelCase")] -struct SubAgentsEnvelope { - #[serde(default)] - sub_agents: Vec, -} - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "camelCase")] -struct SkillsEnvelope { - #[serde(default)] - skills: Vec, -} - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "camelCase")] -struct RulesEnvelope { - #[serde(default)] - rules: Vec, -} - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "camelCase")] -struct ReadmeEnvelope { - #[serde(default)] - readme_prompts: Vec, -} - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "camelCase")] -struct GitIgnoreEnvelope { - #[serde(default)] - global_git_ignore: Option, -} - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "camelCase")] -struct GitExcludeEnvelope { - #[serde(default)] - shadow_git_exclude: Option, -} - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "camelCase")] -struct SharedIgnoreEnvelope { - #[serde(default)] - ai_agent_ignore_config_files: Option>, -} - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "camelCase")] -struct VSCodeEnvelope { - #[serde(default)] - vscode_config_files: Option>, -} - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "camelCase")] -struct ZedEnvelope { - #[serde(default)] - zed_config_files: Option>, -} - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "camelCase")] -struct JetBrainsEnvelope { - #[serde(default)] - jetbrains_config_files: Option>, -} - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "camelCase")] -struct EditorConfigEnvelope { - #[serde(default)] - 
editor_config_files: Option>, -} - #[derive(Debug, Clone)] struct PlannedOutputFile { path: String, @@ -143,128 +24,74 @@ struct PlannedOutputFile { encoding: Option, } -#[derive(Debug, Clone, Copy, Default)] -struct EnabledPlugins { - agents_md: bool, - claude_code: bool, - codex: bool, - cursor: bool, - droid: bool, - gemini: bool, - git: bool, - jetbrains: bool, - jetbrains_code_style: bool, - kiro: bool, - opencode: bool, - qoder: bool, - readme: bool, - trae: bool, - trae_cn: bool, - vscode: bool, - warp: bool, - windsurf: bool, - zed: bool, -} - -impl EnabledPlugins { - fn from_config(config: Option<&PluginsConfig>) -> Self { - Self { - agents_md: config.and_then(|value| value.agents_md).unwrap_or(true), - claude_code: config.and_then(|value| value.claude_code).unwrap_or(true), - codex: config.and_then(|value| value.codex).unwrap_or(false), - cursor: config.and_then(|value| value.cursor).unwrap_or(false), - droid: config.and_then(|value| value.droid).unwrap_or(false), - gemini: config.and_then(|value| value.gemini).unwrap_or(false), - git: config.and_then(|value| value.git).unwrap_or(true), - jetbrains: config.and_then(|value| value.jetbrains).unwrap_or(false), - jetbrains_code_style: config - .and_then(|value| value.jetbrains_code_style) - .unwrap_or(false), - kiro: config.and_then(|value| value.kiro).unwrap_or(false), - opencode: config.and_then(|value| value.opencode).unwrap_or(true), - qoder: config.and_then(|value| value.qoder).unwrap_or(false), - readme: config.and_then(|value| value.readme).unwrap_or(true), - trae: config.and_then(|value| value.trae).unwrap_or(false), - trae_cn: config.and_then(|value| value.trae_cn).unwrap_or(false), - vscode: config.and_then(|value| value.vscode).unwrap_or(false), - warp: config.and_then(|value| value.warp).unwrap_or(false), - windsurf: config.and_then(|value| value.windsurf).unwrap_or(false), - zed: config.and_then(|value| value.zed).unwrap_or(false), - } - } - - fn is_enabled(self, plugin_name: &str) -> bool { - 
match plugin_name { - PLUGIN_AGENTS => self.agents_md, - PLUGIN_GIT => self.git, - PLUGIN_JETBRAINS_CODE_STYLE => self.jetbrains_code_style, - PLUGIN_VSCODE => self.vscode, - PLUGIN_ZED => self.zed, - PLUGIN_README => self.readme, - PLUGIN_CLAUDE => self.claude_code, - PLUGIN_CODEX => self.codex, - PLUGIN_CURSOR => self.cursor, - PLUGIN_DROID => self.droid, - PLUGIN_GEMINI => self.gemini, - PLUGIN_JETBRAINS => self.jetbrains, - PLUGIN_KIRO => self.kiro, - PLUGIN_OPENCODE => self.opencode, - PLUGIN_QODER => self.qoder, - PLUGIN_TRAE => self.trae || self.trae_cn, - PLUGIN_WARP => self.warp, - PLUGIN_WINDSURF => self.windsurf, - _ => false, - } - } - - fn registered_output_plugins(self) -> Vec { - let mut plugins = Vec::new(); - for plugin_name in [ - PLUGIN_AGENTS, - PLUGIN_GIT, - PLUGIN_JETBRAINS_CODE_STYLE, - PLUGIN_VSCODE, - PLUGIN_ZED, - PLUGIN_README, - PLUGIN_CLAUDE, - PLUGIN_CODEX, - PLUGIN_CURSOR, - PLUGIN_DROID, - PLUGIN_GEMINI, - PLUGIN_JETBRAINS, - PLUGIN_KIRO, - PLUGIN_OPENCODE, - PLUGIN_QODER, - PLUGIN_TRAE, - PLUGIN_WARP, - PLUGIN_WINDSURF, - ] { - if self.is_enabled(plugin_name) { - plugins.push(plugin_name.to_string()); - } - } - plugins - } -} - pub(crate) fn install( options: MemorySyncCommandOptions, ) -> Result { + let logger = create_logger("install", options.log_level.as_deref().and_then(|s| crate::infra::logger::LogLevel::from_str_loose(s))); + let _span = logger.span("command.install").enter(); + + logger.info("Install started", Some(json!({ + "cwd": options.cwd.as_ref(), + }))); + let cwd = resolve_cwd(options.cwd.as_deref())?; + + let config_span = logger.span("config.load").enter(); let config_result = load_config(&cwd, options.load_user_config)?; + config_span.exit(); + let workspace_dir = resolve_workspace_dir(&cwd, &config_result.config)?; let mut warnings = build_workspace_mismatch_warning(&cwd, &workspace_dir, &config_result) .into_iter() .collect::>(); let workspace_dir_str = workspace_dir.to_string_lossy().into_owned(); - let 
global_scope = build_global_scope(&config_result.config); - let enabled_plugins = EnabledPlugins::from_config(config_result.config.plugins.as_ref()); - let context = collect_context(&workspace_dir_str, global_scope.as_ref(), enabled_plugins)?; - let planned_outputs = build_output_files(&context, enabled_plugins)?; - let execution = write_output_files(&planned_outputs)?; + logger.info("Config loaded", Some(json!({ + "workspaceDir": &workspace_dir_str, + "configFound": config_result.found, + "configSources": config_result.sources, + }))); + + let global_scope = crate::services::common::build_global_scope(&config_result.config); + let enabled_plugins = EnabledPlugins::from_config(config_result.config.plugins.as_ref(), DefaultPluginKind::Install); + + logger.info("Plugins resolved", Some(json!({ + "enabled": enabled_plugins.registered_plugins(), + }))); + + let context_span = logger.span("context.collect").enter(); + let context = collect_context(&workspace_dir_str, global_scope.as_ref(), &enabled_plugins, &logger)?; + context_span.exit(); + + logger.info("Context collected", Some(json!({ + "globalMemory": context.global_memory.is_some(), + "commands": context.fast_commands.as_ref().map(|v| v.len()), + "skills": context.skills.as_ref().map(|v| v.len()), + "rules": context.rules.as_ref().map(|v| v.len()), + }))); + + let output_span = logger.span("output.build").enter(); + let planned_outputs = build_output_files(&context, enabled_plugins, &logger)?; + output_span.exit(); + + logger.info("Output files built", Some(json!({ + "filesPlanned": planned_outputs.len(), + }))); + + let write_span = logger.span("files.write").enter(); + let execution = write_output_files(&planned_outputs, &logger)?; + write_span.exit(); + warnings.extend(execution.warnings); + logger.info("Install completed", Some(json!({ + "success": execution.errors.is_empty(), + "filesAffected": execution.files_affected, + "dirsAffected": execution.dirs_affected, + "warnings": warnings.len(), + "errors": 
execution.errors.len(), + }))); + Ok(MemorySyncCommandResult { success: execution.errors.is_empty(), files_affected: execution.files_affected as i32, @@ -284,312 +111,89 @@ pub(crate) fn install( }) } -fn resolve_cwd(cwd: Option<&str>) -> Result { - match cwd { - Some(value) => Ok(config::resolve_workspace_dir(value)), - None => std::env::current_dir().map_err(CliError::IoError), - } -} - -fn load_config( - cwd: &Path, - load_user_config: Option, -) -> Result { - if load_user_config == Some(false) { - return Ok(config::MergedConfigResult { - config: UserConfigFile::default(), - sources: vec![], - found: false, - }); - } - - let result = ConfigLoader::with_defaults() - .try_load(cwd) - .map_err(CliError::ConfigError)?; - - if !result.found { - let config_path = config::get_required_global_config_path() - .unwrap_or_else(|_| config::get_global_config_path()); - return Err(CliError::ConfigError(format!( - "Required config file not found at {}. Please create it before running tnmsc.", - config_path.display() - ))); - } - - Ok(result) -} - -fn resolve_workspace_dir(_cwd: &Path, config: &UserConfigFile) -> Result { - match config.workspace_dir.as_deref() { - Some(dir) => Ok(config::resolve_workspace_dir(dir)), - None => Err(CliError::ConfigError( - "workspaceDir is required but was not configured. 
Please set workspaceDir in your .tnmsc.json config file.".to_string(), - )), - } -} - -fn build_global_scope(config: &UserConfigFile) -> Option { - let mut scope = serde_json::Map::new(); - - let mut os = serde_json::Map::new(); - os.insert("platform".to_string(), json!(std::env::consts::OS)); - os.insert("arch".to_string(), json!(std::env::consts::ARCH)); - os.insert("name".to_string(), json!(std::env::consts::OS)); - scope.insert("os".to_string(), Value::Object(os)); - - if let Some(profile) = config.profile.as_ref() { - let mut value = serde_json::Map::new(); - if let Some(name) = profile.name.as_ref() { - value.insert("name".to_string(), json!(name)); - } - if let Some(username) = profile.username.as_ref() { - value.insert("username".to_string(), json!(username)); - } - if let Some(gender) = profile.gender.as_ref() { - value.insert("gender".to_string(), json!(gender)); - } - if let Some(birthday) = profile.birthday.as_ref() { - value.insert("birthday".to_string(), json!(birthday)); - } - for (key, extra) in &profile.extra { - value.insert(key.clone(), extra.clone()); - } - if !value.is_empty() { - scope.insert("profile".to_string(), Value::Object(value)); - } - } - - if let Some(code_styles) = config.code_styles.as_ref() { - let mut value = serde_json::Map::new(); - if let Some(indent) = code_styles.indent { - value.insert( - "indent".to_string(), - json!(match indent { - config::CodeStyleIndent::Tab => "tab", - config::CodeStyleIndent::Space => "space", - }), - ); - } - if let Some(tab_size) = code_styles.tab_size { - value.insert("tabSize".to_string(), json!(tab_size)); - } - for (key, extra) in &code_styles.extra { - value.insert(key.clone(), extra.clone()); - } - if !value.is_empty() { - scope.insert("codeStyles".to_string(), Value::Object(value)); - } - } - - let mut tool = serde_json::Map::new(); - tool.insert("name".to_string(), json!("tnmsc")); - tool.insert("version".to_string(), json!(crate::version())); - scope.insert("tool".to_string(), 
Value::Object(tool)); - - (!scope.is_empty()).then(|| Value::Object(scope)) -} - -fn collect_context( - workspace_dir: &str, - global_scope: Option<&Value>, - enabled_plugins: EnabledPlugins, -) -> Result { - let aindex = collect_json::( - crate::repositories::aindex_resolvers::collect_aindex_resolvers, - json!({ - "workspaceDir": workspace_dir, - }), - )?; - - let project_prompts = collect_json::( - crate::repositories::project_prompt::collect_project_prompt, - json!({ - "workspaceDir": workspace_dir, - "workspace": aindex.workspace, - "globalScope": global_scope, - }), - )?; - - let global_memory = collect_json::( - crate::repositories::global_memory::collect_global_memory, - json!({ - "workspaceDir": workspace_dir, - "globalScope": global_scope, - }), - )?; - - let commands = collect_json::( - crate::repositories::command::collect_command, - json!({ - "workspaceDir": workspace_dir, - "globalScope": global_scope, - }), - )?; - - let sub_agents = collect_json::( - crate::repositories::subagent::collect_subagent, - json!({ - "workspaceDir": workspace_dir, - "globalScope": global_scope, - }), - )?; - - let skills = collect_json::( - crate::repositories::skill::collect_skill, - json!({ - "workspaceDir": workspace_dir, - "globalScope": global_scope, - }), - )?; - - let rules = collect_json::( - crate::repositories::rule::collect_rule, - json!({ - "workspaceDir": workspace_dir, - "globalScope": global_scope, - }), - )?; - let readme = collect_json::( - crate::repositories::readme::collect_readme, - json!({ - "workspaceDir": workspace_dir, - "globalScope": global_scope, - }), - )?; - let gitignore = collect_json::( - crate::repositories::gitignore::collect_gitignore, - json!({ - "workspaceDir": workspace_dir, - }), - )?; - let git_exclude = collect_json::( - crate::repositories::git_exclude::collect_git_exclude, - json!({ - "workspaceDir": workspace_dir, - }), - )?; - let shared_ignore = collect_json::( - crate::repositories::shared_ignore::collect_shared_ignore, - 
json!({ - "workspaceDir": workspace_dir, - }), - )?; - let vscode = collect_json::( - crate::repositories::vscode_config::collect_vscode_config, - json!({ - "workspaceDir": workspace_dir, - }), - )?; - let zed = collect_json::( - crate::repositories::zed_config::collect_zed_config, - json!({ - "workspaceDir": workspace_dir, - }), - )?; - let jetbrains = collect_json::( - crate::repositories::jetbrains_config::collect_jetbrains_config, - json!({ - "workspaceDir": workspace_dir, - }), - )?; - let editor_config = collect_json::( - crate::repositories::editorconfig::collect_editorconfig, - json!({ - "workspaceDir": workspace_dir, - }), - )?; - - Ok(OutputContext { - workspace: Some(project_prompts.workspace), - vscode_config_files: vscode.vscode_config_files, - zed_config_files: zed.zed_config_files, - jetbrains_config_files: jetbrains.jetbrains_config_files, - editor_config_files: editor_config.editor_config_files, - fast_commands: (!commands.commands.is_empty()).then_some(commands.commands), - sub_agents: (!sub_agents.sub_agents.is_empty()).then_some(sub_agents.sub_agents), - skills: (!skills.skills.is_empty()).then_some(skills.skills), - rules: (!rules.rules.is_empty()).then_some(rules.rules), - global_memory: global_memory.global_memory, - global_git_ignore: gitignore.global_git_ignore, - shadow_git_exclude: git_exclude.shadow_git_exclude, - shadow_source_project_dir: None, - readme_prompts: (!readme.readme_prompts.is_empty()).then_some(readme.readme_prompts), - ai_agent_ignore_config_files: shared_ignore.ai_agent_ignore_config_files, - registered_output_plugins: Some(enabled_plugins.registered_output_plugins()), - }) -} - -fn collect_json( - collector: impl Fn(&str) -> Result, - input: Value, -) -> Result -where - T: DeserializeOwned, -{ - let raw = collector(&input.to_string())?; - serde_json::from_str(&raw).map_err(CliError::SerializationError) -} - fn build_output_files( - context: &OutputContext, + context: &crate::context::OutputContext, enabled_plugins: 
EnabledPlugins, + logger: &Logger, ) -> Result, CliError> { let mut outputs = BTreeMap::new(); + let base_span = logger.span("output.base_plans").enter(); let base_plans = crate::domain::base_output_plans::build_base_output_plans(context)?; push_base_plans(&mut outputs, &base_plans, enabled_plugins); + base_span.exit(); if enabled_plugins.claude_code { - let plan = - crate::domain::output_plans::claude_code_output_plan::build_claude_code_output_plan(context)?; + let plugin_span = logger.span("output.claude_code").enter(); + let plan = crate::domain::output_plans::claude_code_output_plan::build_claude_code_output_plan(context)?; push_base_output_files(&mut outputs, &plan.output_files); + plugin_span.exit(); } if enabled_plugins.codex { + let plugin_span = logger.span("output.codex").enter(); let plan = crate::domain::output_plans::codex_output_plan::build_codex_output_plan(context)?; push_base_output_files(&mut outputs, &plan.output_files); + plugin_span.exit(); } if enabled_plugins.cursor { + let plugin_span = logger.span("output.cursor").enter(); let plan = crate::domain::output_plans::cursor_output_plan::build_cursor_output_plan(context)?; push_base_output_files(&mut outputs, &plan.output_files); + plugin_span.exit(); } if enabled_plugins.droid { + let plugin_span = logger.span("output.droid").enter(); let plan = crate::domain::output_plans::droid_output_plan::build_droid_output_plan(context)?; push_droid_output_files(&mut outputs, &plan); + plugin_span.exit(); } if enabled_plugins.gemini { + let plugin_span = logger.span("output.gemini").enter(); let plan = crate::domain::output_plans::gemini_output_plan::build_gemini_output_plan(context)?; push_base_output_files(&mut outputs, &plan.output_files); + plugin_span.exit(); } if enabled_plugins.jetbrains { - let plan = - crate::domain::output_plans::jetbrains_ai_assistant_codex_output_plan::build_jetbrains_ai_assistant_codex_output_plan(context)?; + let plugin_span = logger.span("output.jetbrains").enter(); + let 
plan = crate::domain::output_plans::jetbrains_ai_assistant_codex_output_plan::build_jetbrains_ai_assistant_codex_output_plan(context)?; push_base_output_files(&mut outputs, &plan.output_files); + plugin_span.exit(); } if enabled_plugins.kiro { + let plugin_span = logger.span("output.kiro").enter(); let plan = crate::domain::output_plans::kiro_output_plan::build_kiro_output_plan(context)?; push_base_output_files(&mut outputs, &plan.output_files); + plugin_span.exit(); } if enabled_plugins.opencode { - let plan = - crate::domain::output_plans::opencode_output_plan::build_opencode_output_plan(context)?; + let plugin_span = logger.span("output.opencode").enter(); + let plan = crate::domain::output_plans::opencode_output_plan::build_opencode_output_plan(context)?; push_base_output_files(&mut outputs, &plan.output_files); + plugin_span.exit(); } if enabled_plugins.qoder { + let plugin_span = logger.span("output.qoder").enter(); let plan = crate::domain::output_plans::qoder_output_plan::build_qoder_output_plan(context)?; push_base_output_files(&mut outputs, &plan.output_files); + plugin_span.exit(); } if enabled_plugins.trae || enabled_plugins.trae_cn { + let plugin_span = logger.span("output.trae").enter(); let plan = crate::domain::output_plans::trae_output_plan::build_trae_output_plan(context)?; push_base_output_files(&mut outputs, &plan.output_files); + plugin_span.exit(); } if enabled_plugins.warp { + let plugin_span = logger.span("output.warp").enter(); let plan = crate::domain::output_plans::warp_output_plan::build_warp_output_plan(context)?; push_base_output_files(&mut outputs, &plan.output_files); + plugin_span.exit(); } if enabled_plugins.windsurf { - let plan = - crate::domain::output_plans::windsurf_output_plan::build_windsurf_output_plan(context)?; + let plugin_span = logger.span("output.windsurf").enter(); + let plan = crate::domain::output_plans::windsurf_output_plan::build_windsurf_output_plan(context)?; push_base_output_files(&mut outputs, 
&plan.output_files); + plugin_span.exit(); } Ok(outputs) @@ -648,6 +252,7 @@ struct InstallExecutionResult { fn write_output_files( outputs: &BTreeMap, + logger: &Logger, ) -> Result { let mut files_affected = 0usize; let mut dirs_affected = 0usize; @@ -683,6 +288,7 @@ fn write_output_files( let existing = fs::read(path).ok(); if existing.as_deref() == Some(bytes.as_slice()) { + logger.debug(format!("file.skipped: {}", file.path), Some(json!({ "reason": "unchanged" }))); continue; } @@ -694,6 +300,7 @@ fn write_output_files( continue; } + logger.info(format!("file.written: {}", file.path), None); files_affected += 1; } @@ -728,7 +335,7 @@ fn prepare_target_path(path: &Path, warnings: &mut Vec) -> Result) -> Result usize { - let mut missing = Vec::new(); - let mut current = Some(dir); - - while let Some(path) = current { - if path.exists() { - break; - } - missing.push(path.to_path_buf()); - current = path.parent(); - } - - missing.len() -} - #[cfg(test)] mod tests { use super::*; + use crate::domain::config::UserConfigFile; use std::path::PathBuf; #[test] diff --git a/sdk/src/services/mod.rs b/sdk/src/services/mod.rs index 3afe57e0..4a09122a 100644 --- a/sdk/src/services/mod.rs +++ b/sdk/src/services/mod.rs @@ -1,5 +1,6 @@ pub mod clean_service; -pub mod command_diagnostics; +pub mod command_diagnostics_service; +pub mod common; pub mod dry_run_service; pub mod install_service; -pub mod prompts; +pub mod prompt_service; diff --git a/sdk/src/services/prompts.rs b/sdk/src/services/prompt_service.rs similarity index 97% rename from sdk/src/services/prompts.rs rename to sdk/src/services/prompt_service.rs index 7059696a..a7415091 100644 --- a/sdk/src/services/prompts.rs +++ b/sdk/src/services/prompt_service.rs @@ -1061,6 +1061,9 @@ fn build_prompt_definition_from_id( // --------------------------------------------------------------------------- pub fn list_prompts(options: &ListPromptsOptions) -> Result, String> { + let logger = 
crate::infra::logger::create_logger("prompt_service", None); + let _span = logger.span("prompt.list").enter(); + let env = resolve_prompt_environment(&options.base)?; let items: Vec = collect_discovered_prompt_ids(&env) .into_iter() @@ -1073,6 +1076,8 @@ pub fn list_prompts(options: &ListPromptsOptions) -> Result Result, String> { + let logger = crate::infra::logger::create_logger("prompt_service", None); + let _span = logger.span("prompt.get").enter(); + let env = resolve_prompt_environment(options)?; let def = build_prompt_definition_from_id(prompt_id, &env)?; - Ok(hydrate_prompt(&def, true)) + let result = hydrate_prompt(&def, true); + + logger.info(format!("Get prompt: {}", prompt_id), Some(serde_json::json!({ "found": result.is_some() }))); + Ok(result) } pub fn upsert_prompt_source(input: &UpsertPromptSourceInput) -> Result { + let logger = crate::infra::logger::create_logger("prompt_service", None); + let _span = logger.span("prompt.upsert").enter(); + let env = resolve_prompt_environment(&input.base)?; let definition = build_prompt_definition_from_id(&input.prompt_id, &env)?; let locale = input.locale.unwrap_or(PromptSourceLocale::Zh); @@ -1098,10 +1112,15 @@ pub fn upsert_prompt_source(input: &UpsertPromptSourceInput) -> Result Result { + let logger = crate::infra::logger::create_logger("prompt_service", None); + let _span = logger.span("prompt.write_artifacts").enter(); + if input.en_content.is_none() { return Err("writePromptArtifacts requires enContent".to_string()); } @@ -1113,5 +1132,7 @@ pub fn write_prompt_artifacts(input: &WritePromptArtifactsInput) -> Result Date: Sat, 25 Apr 2026 09:52:37 +0800 Subject: [PATCH 3/3] chore: release 2026.10425.10151 --- Cargo.lock | 18 +++++++++--------- Cargo.toml | 2 +- cli/npm/darwin-arm64/package.json | 2 +- cli/npm/darwin-x64/package.json | 2 +- cli/npm/linux-arm64-gnu/package.json | 2 +- cli/npm/linux-x64-gnu/package.json | 2 +- cli/npm/win32-x64-msvc/package.json | 2 +- cli/package.json | 12 ++++++------ 
doc/package.json | 2 +- gui/package.json | 2 +- gui/src-tauri/Cargo.toml | 2 +- gui/src-tauri/tauri.conf.json | 2 +- mcp/npm/darwin-arm64/package.json | 2 +- mcp/npm/darwin-x64/package.json | 2 +- mcp/npm/linux-arm64-gnu/package.json | 2 +- mcp/npm/linux-x64-gnu/package.json | 2 +- mcp/npm/win32-x64-msvc/package.json | 2 +- mcp/package.json | 12 ++++++------ package.json | 2 +- 19 files changed, 37 insertions(+), 37 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 38861b7d..01e6c251 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3062,7 +3062,7 @@ dependencies = [ [[package]] name = "memory-sync" -version = "2026.10424.111" +version = "2026.10425.10151" dependencies = [ "tnmsc", ] @@ -6223,7 +6223,7 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tnmsc" -version = "2026.10424.111" +version = "2026.10425.10151" dependencies = [ "clap", "serde_json", @@ -6232,7 +6232,7 @@ dependencies = [ [[package]] name = "tnmsc-integrate-tests" -version = "2026.10424.111" +version = "2026.10425.10151" dependencies = [ "flate2", "serde_json", @@ -6242,7 +6242,7 @@ dependencies = [ [[package]] name = "tnmsc-local-tests" -version = "2026.10424.111" +version = "2026.10425.10151" dependencies = [ "dirs", "json5", @@ -6251,7 +6251,7 @@ dependencies = [ [[package]] name = "tnmsd" -version = "2026.10424.111" +version = "2026.10425.10151" dependencies = [ "base64 0.22.1", "chrono", @@ -6279,7 +6279,7 @@ dependencies = [ [[package]] name = "tnmsg" -version = "2026.10424.111" +version = "2026.10425.10151" dependencies = [ "dirs", "proptest", @@ -6294,7 +6294,7 @@ dependencies = [ [[package]] name = "tnmsm" -version = "2026.10424.111" +version = "2026.10425.10151" dependencies = [ "clap", "serde_json", @@ -6303,7 +6303,7 @@ dependencies = [ [[package]] name = "tnmsm-integrate-tests" -version = "2026.10424.111" +version = "2026.10425.10151" dependencies = [ "serde_json", "testcontainers", @@ -7819,7 +7819,7 @@ dependencies = [ 
[[package]] name = "xtask" -version = "2026.10424.111" +version = "2026.10425.10151" dependencies = [ "clap", "serde", diff --git a/Cargo.toml b/Cargo.toml index 53b112c6..dddfa3e8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -29,7 +29,7 @@ members = [ ] [workspace.package] -version = "2026.10424.111" +version = "2026.10425.10151" edition = "2024" rust-version = "1.88" license = "AGPL-3.0-only" diff --git a/cli/npm/darwin-arm64/package.json b/cli/npm/darwin-arm64/package.json index 8f602023..6258f449 100644 --- a/cli/npm/darwin-arm64/package.json +++ b/cli/npm/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-darwin-arm64", - "version": "2026.10424.111", + "version": "2026.10425.10151", "description": "tnmsc native binary for macOS arm64", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/cli/npm/darwin-x64/package.json b/cli/npm/darwin-x64/package.json index b69602e4..edb63393 100644 --- a/cli/npm/darwin-x64/package.json +++ b/cli/npm/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-darwin-x64", - "version": "2026.10424.111", + "version": "2026.10425.10151", "description": "tnmsc native binary for macOS x64", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/cli/npm/linux-arm64-gnu/package.json b/cli/npm/linux-arm64-gnu/package.json index 9165ce52..a3d0c325 100644 --- a/cli/npm/linux-arm64-gnu/package.json +++ b/cli/npm/linux-arm64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-linux-arm64-gnu", - "version": "2026.10424.111", + "version": "2026.10425.10151", "description": "tnmsc native binary for Linux arm64 (glibc)", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/cli/npm/linux-x64-gnu/package.json b/cli/npm/linux-x64-gnu/package.json index 71e0c89f..243e6350 100644 --- a/cli/npm/linux-x64-gnu/package.json +++ b/cli/npm/linux-x64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-linux-x64-gnu", - "version": "2026.10424.111", 
+ "version": "2026.10425.10151", "description": "tnmsc native binary for Linux x64 (glibc)", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/cli/npm/win32-x64-msvc/package.json b/cli/npm/win32-x64-msvc/package.json index 32be222e..1e29886d 100644 --- a/cli/npm/win32-x64-msvc/package.json +++ b/cli/npm/win32-x64-msvc/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-win32-x64-msvc", - "version": "2026.10424.111", + "version": "2026.10425.10151", "description": "tnmsc native binary for Windows x64", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/cli/package.json b/cli/package.json index e2fd1921..6ba5505c 100644 --- a/cli/package.json +++ b/cli/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli", - "version": "2026.10424.111", + "version": "2026.10425.10151", "description": "TrueNine Memory Synchronization CLI metadata package", "author": "TrueNine", "license": "AGPL-3.0-only", @@ -34,10 +34,10 @@ "test": "cargo test --manifest-path Cargo.toml" }, "optionalDependencies": { - "@truenine/memory-sync-cli-darwin-arm64": "2026.10424.111", - "@truenine/memory-sync-cli-darwin-x64": "2026.10424.111", - "@truenine/memory-sync-cli-linux-arm64-gnu": "2026.10424.111", - "@truenine/memory-sync-cli-linux-x64-gnu": "2026.10424.111", - "@truenine/memory-sync-cli-win32-x64-msvc": "2026.10424.111" + "@truenine/memory-sync-cli-darwin-arm64": "2026.10425.10151", + "@truenine/memory-sync-cli-darwin-x64": "2026.10425.10151", + "@truenine/memory-sync-cli-linux-arm64-gnu": "2026.10425.10151", + "@truenine/memory-sync-cli-linux-x64-gnu": "2026.10425.10151", + "@truenine/memory-sync-cli-win32-x64-msvc": "2026.10425.10151" } } diff --git a/doc/package.json b/doc/package.json index 269400e5..75056af1 100644 --- a/doc/package.json +++ b/doc/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-docs", - "version": "2026.10424.111", + "version": "2026.10425.10151", "private": true, "packageManager": "pnpm@10.33.0", 
"description": "Chinese-first manifesto-led documentation site for @truenine/memory-sync.", diff --git a/gui/package.json b/gui/package.json index f1bf5139..b5a5cabb 100644 --- a/gui/package.json +++ b/gui/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-gui", - "version": "2026.10424.111", + "version": "2026.10425.10151", "private": true, "engines": { "node": ">= 22" diff --git a/gui/src-tauri/Cargo.toml b/gui/src-tauri/Cargo.toml index f01a81b9..06405893 100644 --- a/gui/src-tauri/Cargo.toml +++ b/gui/src-tauri/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "tnmsg" -version = "2026.10424.111" +version = "2026.10425.10151" description = "Memory Sync desktop GUI application" authors.workspace = true edition.workspace = true diff --git a/gui/src-tauri/tauri.conf.json b/gui/src-tauri/tauri.conf.json index 28f8c704..7e6f1c67 100644 --- a/gui/src-tauri/tauri.conf.json +++ b/gui/src-tauri/tauri.conf.json @@ -1,6 +1,6 @@ { "$schema": "https://schema.tauri.app/config/2", - "version": "2026.10424.111", + "version": "2026.10425.10151", "productName": "Memory Sync", "identifier": "org.truenine.memory-sync", "build": { diff --git a/mcp/npm/darwin-arm64/package.json b/mcp/npm/darwin-arm64/package.json index 90f0ef87..bd246f0e 100644 --- a/mcp/npm/darwin-arm64/package.json +++ b/mcp/npm/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-mcp-darwin-arm64", - "version": "2026.10424.111", + "version": "2026.10425.10151", "description": "tnmsm native binary for macOS arm64", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/mcp/npm/darwin-x64/package.json b/mcp/npm/darwin-x64/package.json index aac18393..cafdbdc6 100644 --- a/mcp/npm/darwin-x64/package.json +++ b/mcp/npm/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-mcp-darwin-x64", - "version": "2026.10424.111", + "version": "2026.10425.10151", "description": "tnmsm native binary for macOS x64", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git 
a/mcp/npm/linux-arm64-gnu/package.json b/mcp/npm/linux-arm64-gnu/package.json index e95b95d4..3bc21c57 100644 --- a/mcp/npm/linux-arm64-gnu/package.json +++ b/mcp/npm/linux-arm64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-mcp-linux-arm64-gnu", - "version": "2026.10424.111", + "version": "2026.10425.10151", "description": "tnmsm native binary for Linux arm64 (glibc)", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/mcp/npm/linux-x64-gnu/package.json b/mcp/npm/linux-x64-gnu/package.json index f5cd7cd0..3545938a 100644 --- a/mcp/npm/linux-x64-gnu/package.json +++ b/mcp/npm/linux-x64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-mcp-linux-x64-gnu", - "version": "2026.10424.111", + "version": "2026.10425.10151", "description": "tnmsm native binary for Linux x64 (glibc)", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/mcp/npm/win32-x64-msvc/package.json b/mcp/npm/win32-x64-msvc/package.json index 2c450d08..aa716f6d 100644 --- a/mcp/npm/win32-x64-msvc/package.json +++ b/mcp/npm/win32-x64-msvc/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-mcp-win32-x64-msvc", - "version": "2026.10424.111", + "version": "2026.10425.10151", "description": "tnmsm native binary for Windows x64", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/mcp/package.json b/mcp/package.json index 1618821a..2469895f 100644 --- a/mcp/package.json +++ b/mcp/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-mcp", - "version": "2026.10424.111", + "version": "2026.10425.10151", "description": "TrueNine Memory Sync MCP metadata package", "author": "TrueNine", "license": "AGPL-3.0-only", @@ -32,10 +32,10 @@ "test": "cargo test --manifest-path Cargo.toml" }, "optionalDependencies": { - "@truenine/memory-sync-mcp-darwin-arm64": "2026.10424.111", - "@truenine/memory-sync-mcp-darwin-x64": "2026.10424.111", - "@truenine/memory-sync-mcp-linux-arm64-gnu": "2026.10424.111", - 
"@truenine/memory-sync-mcp-linux-x64-gnu": "2026.10424.111", - "@truenine/memory-sync-mcp-win32-x64-msvc": "2026.10424.111" + "@truenine/memory-sync-mcp-darwin-arm64": "2026.10425.10151", + "@truenine/memory-sync-mcp-darwin-x64": "2026.10425.10151", + "@truenine/memory-sync-mcp-linux-arm64-gnu": "2026.10425.10151", + "@truenine/memory-sync-mcp-linux-x64-gnu": "2026.10425.10151", + "@truenine/memory-sync-mcp-win32-x64-msvc": "2026.10425.10151" } } diff --git a/package.json b/package.json index 322fecbb..96f83d20 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync", - "version": "2026.10424.111", + "version": "2026.10425.10151", "description": "Cross-AI-tool prompt synchronisation toolkit (CLI + Tauri desktop GUI) — one ruleset, multi-target adaptation. Monorepo powered by pnpm + Turbo.", "license": "AGPL-3.0-only", "keywords": [