diff --git a/src/cli/commands/interactive.ts b/src/cli/commands/interactive.ts index dbafd58..56362f5 100644 --- a/src/cli/commands/interactive.ts +++ b/src/cli/commands/interactive.ts @@ -1,5 +1,4 @@ import chalk from "chalk"; -import { logger } from "../../shared/logger"; import { showMainMenu, showGoodbyeScreen, pressAnyKey } from "../tui"; import { authCommand, loadConfig } from "./auth"; import { configCommand } from "./config"; @@ -11,9 +10,6 @@ import { startCommand } from "./start"; export async function interactiveMode(): Promise<void> { let running = true; - // Initialize logger early so logs dir is created on first run - logger.debug("TxtCode interactive mode started"); - while (running) { console.clear(); diff --git a/src/cli/commands/logs.ts b/src/cli/commands/logs.ts index be485a6..ccd71ba 100644 --- a/src/cli/commands/logs.ts +++ b/src/cli/commands/logs.ts @@ -126,10 +126,6 @@ export function logsCommand(session: string | undefined, options: LogsOptions) { console.log(` ${chalk.white(`[${i + 1}]`)} ${ts} ${chalk.gray(`(${sizeStr})`)}${label}`); } - console.log(""); - console.log(chalk.gray(" txtcode logs View a session")); - console.log(chalk.gray(" txtcode logs -f Follow the latest session")); - console.log(chalk.gray(" txtcode logs --clear Delete all logs")); console.log(""); return; } diff --git a/src/cli/commands/start.ts b/src/cli/commands/start.ts index 20085fb..f18081b 100644 --- a/src/cli/commands/start.ts +++ b/src/cli/commands/start.ts @@ -148,31 +148,55 @@ export async function startCommand(_options: { daemon?: boolean }) { process.on("SIGINT", shutdownHandler); process.on("SIGTERM", shutdownHandler); + // Set up Enter key listener to stop the agent + const waitForEnter = new Promise<void>((resolve) => { + process.stdin.setRawMode?.(false); + process.stdin.resume(); + process.stdin.once("data", () => resolve()); + }); + try { + let bot: { start(): Promise<void> }; + if (config.platform === "whatsapp") { - const bot = new WhatsAppBot(agent); - await 
bot.start(); + bot = new WhatsAppBot(agent); } else if (config.platform === "telegram") { - const bot = new TelegramBot(agent); - await bot.start(); + bot = new TelegramBot(agent); } else if (config.platform === "discord") { - const bot = new DiscordBot(agent); - await bot.start(); + bot = new DiscordBot(agent); } else if (config.platform === "slack") { - const bot = new SlackBot(agent); - await bot.start(); + bot = new SlackBot(agent); } else if (config.platform === "teams") { - const bot = new TeamsBot(agent); - await bot.start(); + bot = new TeamsBot(agent); } else if (config.platform === "signal") { - const bot = new SignalBot(agent); - await bot.start(); + bot = new SignalBot(agent); } else { logger.error("Invalid platform specified"); process.exit(1); } + + // Start bot without blocking — race with Enter key + bot.start().catch((error) => { + logger.error("Failed to start agent", error); + process.exit(1); + }); + + // Show message after a short delay to let the bot print its startup logs + setTimeout(() => { + console.log(chalk.gray("\nPress Enter to stop the agent...\n")); + }, 2000); + + // Wait for user to press Enter + await waitForEnter; + + logger.debug("User requested stop via Enter key"); + process.stdin.pause(); + await agent.shutdown(); + // Return to main menu instead of exiting + return; } catch (error) { logger.error("Failed to start agent", error); - process.exit(1); + // Return to main menu instead of exiting + return; } } diff --git a/src/core/router.ts b/src/core/router.ts index c2eccf3..7c6845f 100644 --- a/src/core/router.ts +++ b/src/core/router.ts @@ -249,6 +249,25 @@ export class Router { return "[WARN] AI model not configured. 
Run: txtcode config"; } + logger.debug(`[Router] Chat → provider=${this.provider}, model=${this.model}`); + const startTime = Date.now(); + + try { + const result = await this._routeToProvider(instruction); + logger.debug( + `[Router] Chat complete → provider=${this.provider}, time=${Date.now() - startTime}ms, response=${result.length} chars`, + ); + return result; + } catch (error) { + logger.error( + `[Router] Chat failed → provider=${this.provider}, time=${Date.now() - startTime}ms`, + error, + ); + throw error; + } + } + + private async _routeToProvider(instruction: string): Promise<string> { switch (this.provider) { case "anthropic": return await processWithAnthropic(instruction, this.apiKey, this.model, this.toolRegistry); @@ -287,6 +306,9 @@ export class Router { this.currentAbortController = new AbortController(); const signal = this.currentAbortController.signal; + logger.debug(`[Router] Code → adapter=${this.currentAdapterName}`); + const startTime = Date.now(); + try { this.contextManager.addEntry("user", instruction); @@ -307,6 +329,10 @@ export class Router { this.contextManager.addEntry("assistant", result); + logger.debug( + `[Router] Code complete → adapter=${this.currentAdapterName}, time=${Date.now() - startTime}ms, response=${result.length} chars`, + ); + return result; } finally { this.currentAbortController = null; diff --git a/src/providers/anthropic.ts b/src/providers/anthropic.ts index 9966a3b..690fe71 100644 --- a/src/providers/anthropic.ts +++ b/src/providers/anthropic.ts @@ -9,6 +9,7 @@ import type { ToolUnion, ToolUseBlock, } from "@anthropic-ai/sdk/resources/messages/messages"; +import { logger } from "../shared/logger"; import { ToolRegistry } from "../tools/registry"; const MAX_ITERATIONS = 10; @@ -28,6 +29,9 @@ export async function processWithAnthropic( instruction: string, apiKey: string, model: string, toolRegistry?: ToolRegistry, ): Promise<string> { + const startTime = Date.now(); + logger.debug(`[Anthropic] Request → model=${model}, prompt=${instruction.length} chars`); + try { 
const anthropic = new Anthropic({ apiKey }); @@ -38,6 +42,7 @@ export async function processWithAnthropic( const messages: MessageParam[] = [{ role: "user", content: instruction }]; for (let i = 0; i < MAX_ITERATIONS; i++) { + const iterStart = Date.now(); const response = await anthropic.messages.create({ model, max_tokens: 4096, @@ -46,6 +51,12 @@ export async function processWithAnthropic( ...(tools ? { tools } : {}), }); + logger.debug( + `[Anthropic] Response ← iteration=${i + 1}, stop=${response.stop_reason}, ` + + `tokens=${response.usage.input_tokens}in/${response.usage.output_tokens}out, ` + + `time=${Date.now() - iterStart}ms`, + ); + const textParts = response.content .filter((block: ContentBlock): block is TextBlock => block.type === "text") .map((block: TextBlock) => block.text); @@ -55,9 +66,12 @@ export async function processWithAnthropic( ); if (toolCalls.length === 0 || !toolRegistry) { + logger.debug(`[Anthropic] Done in ${Date.now() - startTime}ms (${i + 1} iteration(s))`); return textParts.join("\n") || "No response from Claude"; } + logger.debug(`[Anthropic] Tool calls: ${toolCalls.map((t) => t.name).join(", ")}`); + messages.push({ role: "assistant", content: response.content }); const toolResults: ToolResultBlockParam[] = []; @@ -76,8 +90,10 @@ export async function processWithAnthropic( messages.push({ role: "user", content: toolResults }); } + logger.warn(`[Anthropic] Reached max ${MAX_ITERATIONS} iterations`); return "Reached maximum tool iterations."; } catch (error: unknown) { + logger.error(`[Anthropic] API error after ${Date.now() - startTime}ms`, error); throw new Error( `Anthropic API error: ${error instanceof Error ? 
error.message : "Unknown error"}`, { cause: error }, diff --git a/src/providers/gemini.ts b/src/providers/gemini.ts index 986231f..7465e41 100644 --- a/src/providers/gemini.ts +++ b/src/providers/gemini.ts @@ -5,6 +5,7 @@ import { type Tool as GeminiTool, GoogleGenerativeAI, } from "@google/generative-ai"; +import { logger } from "../shared/logger"; import { ToolRegistry } from "../tools/registry"; const MAX_ITERATIONS = 10; @@ -24,6 +25,9 @@ export async function processWithGemini( instruction: string, apiKey: string, model: string, toolRegistry?: ToolRegistry, ): Promise<string> { + const startTime = Date.now(); + logger.debug(`[Gemini] Request → model=${model}, prompt=${instruction.length} chars`); + try { const genAI = new GoogleGenerativeAI(apiKey); @@ -38,16 +42,26 @@ }); const chat = genModel.startChat(); + let iterStart = Date.now(); let result = await chat.sendMessage(instruction); for (let i = 0; i < MAX_ITERATIONS; i++) { const response = result.response; const calls = response.functionCalls(); + logger.debug( + `[Gemini] Response ← iteration=${i + 1}, ` + + `toolCalls=${calls?.length ?? 0}, ` + + `time=${Date.now() - iterStart}ms`, + ); + if (!calls || calls.length === 0 || !toolRegistry) { + logger.debug(`[Gemini] Done in ${Date.now() - startTime}ms (${i + 1} iteration(s))`); return response.text(); } + logger.debug(`[Gemini] Tool calls: ${calls.map((c) => c.name).join(", ")}`); + const toolResults: FunctionResponsePart[] = []; for (const call of calls) { const execResult = await toolRegistry.execute( @@ -62,11 +76,14 @@ }); } + iterStart = Date.now(); result = await chat.sendMessage(toolResults); } + logger.warn(`[Gemini] Reached max ${MAX_ITERATIONS} iterations`); return "Reached maximum tool iterations."; } catch (error: unknown) { + logger.error(`[Gemini] API error after ${Date.now() - startTime}ms`, error); throw new Error( `Gemini API error: ${error instanceof Error ? 
error.message : "Unknown error"}`, { cause: error }, diff --git a/src/providers/huggingface.ts b/src/providers/huggingface.ts index 95989b7..43066a2 100644 --- a/src/providers/huggingface.ts +++ b/src/providers/huggingface.ts @@ -5,6 +5,7 @@ import type { ChatCompletionMessageParam, ChatCompletionTool, } from "openai/resources/chat/completions/completions"; +import { logger } from "../shared/logger"; import { ToolRegistry } from "../tools/registry"; const MAX_ITERATIONS = 10; @@ -25,6 +26,9 @@ export async function processWithHuggingFace( model: string, toolRegistry?: ToolRegistry, ): Promise { + const startTime = Date.now(); + logger.debug(`[HuggingFace] Request → model=${model}, prompt=${instruction.length} chars`); + try { // HuggingFace Inference Providers use OpenAI-compatible API const client = new OpenAI({ @@ -42,6 +46,7 @@ export async function processWithHuggingFace( ]; for (let i = 0; i < MAX_ITERATIONS; i++) { + const iterStart = Date.now(); const completion = await client.chat.completions.create({ model, messages, @@ -52,10 +57,21 @@ export async function processWithHuggingFace( const choice = completion.choices[0]; const assistantMsg = choice.message; + logger.debug( + `[HuggingFace] Response ← iteration=${i + 1}, finish=${choice.finish_reason}, ` + + `tokens=${completion.usage?.prompt_tokens ?? "?"}in/${completion.usage?.completion_tokens ?? "?"}out, ` + + `time=${Date.now() - iterStart}ms`, + ); + if (!assistantMsg.tool_calls || assistantMsg.tool_calls.length === 0 || !toolRegistry) { + logger.debug(`[HuggingFace] Done in ${Date.now() - startTime}ms (${i + 1} iteration(s))`); return assistantMsg.content || "No response from HuggingFace"; } + logger.debug( + `[HuggingFace] Tool calls: ${assistantMsg.tool_calls.map((t) => ("function" in t ? 
t.function.name : t.type)).join(", ")}`, + ); + messages.push(assistantMsg); for (const toolCall of assistantMsg.tool_calls) { @@ -72,8 +88,10 @@ export async function processWithHuggingFace( } } + logger.warn(`[HuggingFace] Reached max ${MAX_ITERATIONS} iterations`); return "Reached maximum tool iterations."; } catch (error: unknown) { + logger.error(`[HuggingFace] API error after ${Date.now() - startTime}ms`, error); throw new Error( `HuggingFace API error: ${error instanceof Error ? error.message : "Unknown error"}`, { cause: error }, diff --git a/src/providers/minimax.ts b/src/providers/minimax.ts index aa0326c..83933cd 100644 --- a/src/providers/minimax.ts +++ b/src/providers/minimax.ts @@ -9,6 +9,7 @@ import type { ToolUnion, ToolUseBlock, } from "@anthropic-ai/sdk/resources/messages/messages"; +import { logger } from "../shared/logger"; import { ToolRegistry } from "../tools/registry"; const MAX_ITERATIONS = 10; @@ -29,6 +30,9 @@ export async function processWithMiniMax( model: string, toolRegistry?: ToolRegistry, ): Promise { + const startTime = Date.now(); + logger.debug(`[MiniMax] Request → model=${model}, prompt=${instruction.length} chars`); + try { // MiniMax uses Anthropic-compatible API const client = new Anthropic({ @@ -43,6 +47,7 @@ export async function processWithMiniMax( const messages: MessageParam[] = [{ role: "user", content: instruction }]; for (let i = 0; i < MAX_ITERATIONS; i++) { + const iterStart = Date.now(); const response = await client.messages.create({ model, max_tokens: 4096, @@ -51,6 +56,12 @@ export async function processWithMiniMax( ...(tools ? 
{ tools } : {}), }); + logger.debug( + `[MiniMax] Response ← iteration=${i + 1}, stop=${response.stop_reason}, ` + + `tokens=${response.usage.input_tokens}in/${response.usage.output_tokens}out, ` + + `time=${Date.now() - iterStart}ms`, + ); + const textParts = response.content .filter((block: ContentBlock): block is TextBlock => block.type === "text") .map((block: TextBlock) => block.text); @@ -60,9 +71,12 @@ export async function processWithMiniMax( ); if (toolCalls.length === 0 || !toolRegistry) { + logger.debug(`[MiniMax] Done in ${Date.now() - startTime}ms (${i + 1} iteration(s))`); return textParts.join("\n") || "No response from MiniMax"; } + logger.debug(`[MiniMax] Tool calls: ${toolCalls.map((t) => t.name).join(", ")}`); + messages.push({ role: "assistant", content: response.content }); const toolResults: ToolResultBlockParam[] = []; @@ -81,8 +95,10 @@ export async function processWithMiniMax( messages.push({ role: "user", content: toolResults }); } + logger.warn(`[MiniMax] Reached max ${MAX_ITERATIONS} iterations`); return "Reached maximum tool iterations."; } catch (error: unknown) { + logger.error(`[MiniMax] API error after ${Date.now() - startTime}ms`, error); throw new Error( `MiniMax API error: ${error instanceof Error ? 
error.message : "Unknown error"}`, { cause: error }, diff --git a/src/providers/mistral.ts b/src/providers/mistral.ts index d88fe1f..7be41a1 100644 --- a/src/providers/mistral.ts +++ b/src/providers/mistral.ts @@ -5,6 +5,7 @@ import type { ChatCompletionMessageParam, ChatCompletionTool, } from "openai/resources/chat/completions/completions"; +import { logger } from "../shared/logger"; import { ToolRegistry } from "../tools/registry"; const MAX_ITERATIONS = 10; @@ -24,6 +25,9 @@ export async function processWithMistral( model: string, toolRegistry?: ToolRegistry, ): Promise { + const startTime = Date.now(); + logger.debug(`[Mistral] Request → model=${model}, prompt=${instruction.length} chars`); + try { const client = new OpenAI({ apiKey, @@ -40,6 +44,7 @@ export async function processWithMistral( ]; for (let i = 0; i < MAX_ITERATIONS; i++) { + const iterStart = Date.now(); const completion = await client.chat.completions.create({ model, messages, @@ -50,10 +55,21 @@ export async function processWithMistral( const choice = completion.choices[0]; const assistantMsg = choice.message; + logger.debug( + `[Mistral] Response ← iteration=${i + 1}, finish=${choice.finish_reason}, ` + + `tokens=${completion.usage?.prompt_tokens ?? "?"}in/${completion.usage?.completion_tokens ?? "?"}out, ` + + `time=${Date.now() - iterStart}ms`, + ); + if (!assistantMsg.tool_calls || assistantMsg.tool_calls.length === 0 || !toolRegistry) { + logger.debug(`[Mistral] Done in ${Date.now() - startTime}ms (${i + 1} iteration(s))`); return assistantMsg.content || "No response from Mistral AI"; } + logger.debug( + `[Mistral] Tool calls: ${assistantMsg.tool_calls.map((t) => ("function" in t ? 
t.function.name : t.type)).join(", ")}`, + ); + messages.push(assistantMsg); for (const toolCall of assistantMsg.tool_calls) { @@ -70,8 +86,10 @@ export async function processWithMistral( } } + logger.warn(`[Mistral] Reached max ${MAX_ITERATIONS} iterations`); return "Reached maximum tool iterations."; } catch (error: unknown) { + logger.error(`[Mistral] API error after ${Date.now() - startTime}ms`, error); throw new Error( `Mistral AI API error: ${error instanceof Error ? error.message : "Unknown error"}`, { cause: error }, diff --git a/src/providers/moonshot.ts b/src/providers/moonshot.ts index a1b40b6..29b77fd 100644 --- a/src/providers/moonshot.ts +++ b/src/providers/moonshot.ts @@ -5,6 +5,7 @@ import type { ChatCompletionMessageParam, ChatCompletionTool, } from "openai/resources/chat/completions/completions"; +import { logger } from "../shared/logger"; import { ToolRegistry } from "../tools/registry"; const MAX_ITERATIONS = 10; @@ -24,6 +25,9 @@ export async function processWithMoonshot( model: string, toolRegistry?: ToolRegistry, ): Promise { + const startTime = Date.now(); + logger.debug(`[Moonshot] Request → model=${model}, prompt=${instruction.length} chars`); + try { const client = new OpenAI({ apiKey, @@ -40,6 +44,7 @@ export async function processWithMoonshot( ]; for (let i = 0; i < MAX_ITERATIONS; i++) { + const iterStart = Date.now(); const completion = await client.chat.completions.create({ model, messages, @@ -50,10 +55,21 @@ export async function processWithMoonshot( const choice = completion.choices[0]; const assistantMsg = choice.message; + logger.debug( + `[Moonshot] Response ← iteration=${i + 1}, finish=${choice.finish_reason}, ` + + `tokens=${completion.usage?.prompt_tokens ?? "?"}in/${completion.usage?.completion_tokens ?? 
"?"}out, ` + + `time=${Date.now() - iterStart}ms`, + ); + if (!assistantMsg.tool_calls || assistantMsg.tool_calls.length === 0 || !toolRegistry) { + logger.debug(`[Moonshot] Done in ${Date.now() - startTime}ms (${i + 1} iteration(s))`); return assistantMsg.content || "No response from Moonshot AI"; } + logger.debug( + `[Moonshot] Tool calls: ${assistantMsg.tool_calls.map((t) => ("function" in t ? t.function.name : t.type)).join(", ")}`, + ); + messages.push(assistantMsg); for (const toolCall of assistantMsg.tool_calls) { @@ -70,8 +86,10 @@ export async function processWithMoonshot( } } + logger.warn(`[Moonshot] Reached max ${MAX_ITERATIONS} iterations`); return "Reached maximum tool iterations."; } catch (error: unknown) { + logger.error(`[Moonshot] API error after ${Date.now() - startTime}ms`, error); throw new Error( `Moonshot AI API error: ${error instanceof Error ? error.message : "Unknown error"}`, { cause: error }, diff --git a/src/providers/openai.ts b/src/providers/openai.ts index e979952..a377e79 100644 --- a/src/providers/openai.ts +++ b/src/providers/openai.ts @@ -5,6 +5,7 @@ import type { ChatCompletionMessageParam, ChatCompletionTool, } from "openai/resources/chat/completions/completions"; +import { logger } from "../shared/logger"; import { ToolRegistry } from "../tools/registry"; const MAX_ITERATIONS = 10; @@ -24,6 +25,9 @@ export async function processWithOpenAI( model: string, toolRegistry?: ToolRegistry, ): Promise { + const startTime = Date.now(); + logger.debug(`[OpenAI] Request → model=${model}, prompt=${instruction.length} chars`); + try { const openai = new OpenAI({ apiKey }); @@ -37,6 +41,7 @@ export async function processWithOpenAI( ]; for (let i = 0; i < MAX_ITERATIONS; i++) { + const iterStart = Date.now(); const completion = await openai.chat.completions.create({ model, messages, @@ -47,10 +52,21 @@ export async function processWithOpenAI( const choice = completion.choices[0]; const assistantMsg = choice.message; + logger.debug( + 
`[OpenAI] Response ← iteration=${i + 1}, finish=${choice.finish_reason}, ` + + `tokens=${completion.usage?.prompt_tokens ?? "?"}in/${completion.usage?.completion_tokens ?? "?"}out, ` + + `time=${Date.now() - iterStart}ms`, + ); + if (!assistantMsg.tool_calls || assistantMsg.tool_calls.length === 0 || !toolRegistry) { + logger.debug(`[OpenAI] Done in ${Date.now() - startTime}ms (${i + 1} iteration(s))`); return assistantMsg.content || "No response from GPT"; } + logger.debug( + `[OpenAI] Tool calls: ${assistantMsg.tool_calls.map((t) => ("function" in t ? t.function.name : t.type)).join(", ")}`, + ); + messages.push(assistantMsg); for (const toolCall of assistantMsg.tool_calls) { @@ -67,8 +83,10 @@ export async function processWithOpenAI( } } + logger.warn(`[OpenAI] Reached max ${MAX_ITERATIONS} iterations`); return "Reached maximum tool iterations."; } catch (error: unknown) { + logger.error(`[OpenAI] API error after ${Date.now() - startTime}ms`, error); throw new Error( `OpenAI API error: ${error instanceof Error ? 
error.message : "Unknown error"}`, { cause: error }, diff --git a/src/providers/openrouter.ts b/src/providers/openrouter.ts index d4e9a7e..ec84ea0 100644 --- a/src/providers/openrouter.ts +++ b/src/providers/openrouter.ts @@ -5,6 +5,7 @@ import type { ChatCompletionMessageParam, ChatCompletionTool, } from "openai/resources/chat/completions/completions"; +import { logger } from "../shared/logger"; import { ToolRegistry } from "../tools/registry"; const MAX_ITERATIONS = 10; @@ -24,6 +25,9 @@ export async function processWithOpenRouter( model: string, toolRegistry?: ToolRegistry, ): Promise { + const startTime = Date.now(); + logger.debug(`[OpenRouter] Request → model=${model}, prompt=${instruction.length} chars`); + try { const client = new OpenAI({ apiKey, @@ -44,6 +48,7 @@ export async function processWithOpenRouter( ]; for (let i = 0; i < MAX_ITERATIONS; i++) { + const iterStart = Date.now(); const completion = await client.chat.completions.create({ model, messages, @@ -54,10 +59,21 @@ export async function processWithOpenRouter( const choice = completion.choices[0]; const assistantMsg = choice.message; + logger.debug( + `[OpenRouter] Response ← iteration=${i + 1}, finish=${choice.finish_reason}, ` + + `tokens=${completion.usage?.prompt_tokens ?? "?"}in/${completion.usage?.completion_tokens ?? "?"}out, ` + + `time=${Date.now() - iterStart}ms`, + ); + if (!assistantMsg.tool_calls || assistantMsg.tool_calls.length === 0 || !toolRegistry) { + logger.debug(`[OpenRouter] Done in ${Date.now() - startTime}ms (${i + 1} iteration(s))`); return assistantMsg.content || "No response from OpenRouter"; } + logger.debug( + `[OpenRouter] Tool calls: ${assistantMsg.tool_calls.map((t) => ("function" in t ? 
t.function.name : t.type)).join(", ")}`, + ); + messages.push(assistantMsg); for (const toolCall of assistantMsg.tool_calls) { @@ -74,8 +90,10 @@ export async function processWithOpenRouter( } } + logger.warn(`[OpenRouter] Reached max ${MAX_ITERATIONS} iterations`); return "Reached maximum tool iterations."; } catch (error: unknown) { + logger.error(`[OpenRouter] API error after ${Date.now() - startTime}ms`, error); throw new Error( `OpenRouter API error: ${error instanceof Error ? error.message : "Unknown error"}`, { cause: error }, diff --git a/src/providers/xai.ts b/src/providers/xai.ts index 9faf52b..fd8707d 100644 --- a/src/providers/xai.ts +++ b/src/providers/xai.ts @@ -5,6 +5,7 @@ import type { ChatCompletionMessageParam, ChatCompletionTool, } from "openai/resources/chat/completions/completions"; +import { logger } from "../shared/logger"; import { ToolRegistry } from "../tools/registry"; const MAX_ITERATIONS = 10; @@ -24,6 +25,9 @@ export async function processWithXAI( model: string, toolRegistry?: ToolRegistry, ): Promise { + const startTime = Date.now(); + logger.debug(`[xAI] Request → model=${model}, prompt=${instruction.length} chars`); + try { const client = new OpenAI({ apiKey, @@ -40,6 +44,7 @@ export async function processWithXAI( ]; for (let i = 0; i < MAX_ITERATIONS; i++) { + const iterStart = Date.now(); const completion = await client.chat.completions.create({ model, messages, @@ -50,10 +55,21 @@ export async function processWithXAI( const choice = completion.choices[0]; const assistantMsg = choice.message; + logger.debug( + `[xAI] Response ← iteration=${i + 1}, finish=${choice.finish_reason}, ` + + `tokens=${completion.usage?.prompt_tokens ?? "?"}in/${completion.usage?.completion_tokens ?? 
"?"}out, ` + + `time=${Date.now() - iterStart}ms`, + ); + if (!assistantMsg.tool_calls || assistantMsg.tool_calls.length === 0 || !toolRegistry) { + logger.debug(`[xAI] Done in ${Date.now() - startTime}ms (${i + 1} iteration(s))`); return assistantMsg.content || "No response from xAI"; } + logger.debug( + `[xAI] Tool calls: ${assistantMsg.tool_calls.map((t) => ("function" in t ? t.function.name : t.type)).join(", ")}`, + ); + messages.push(assistantMsg); for (const toolCall of assistantMsg.tool_calls) { @@ -70,8 +86,10 @@ export async function processWithXAI( } } + logger.warn(`[xAI] Reached max ${MAX_ITERATIONS} iterations`); return "Reached maximum tool iterations."; } catch (error: unknown) { + logger.error(`[xAI] API error after ${Date.now() - startTime}ms`, error); throw new Error(`xAI API error: ${error instanceof Error ? error.message : "Unknown error"}`, { cause: error, }); diff --git a/src/shared/logger.ts b/src/shared/logger.ts index 79d89d1..22c6778 100644 --- a/src/shared/logger.ts +++ b/src/shared/logger.ts @@ -1,3 +1,4 @@ +import { execSync } from "child_process"; import fs from "fs"; import os from "os"; import path from "path"; @@ -5,10 +6,13 @@ import path from "path"; export const LOG_DIR = path.join(os.homedir(), ".txtcode", "logs"); const RETENTION_DAYS = 7; +// eslint-disable-next-line no-control-regex +const ANSI_REGEX = /\x1b\[[0-9;]*m/g; + class Logger { - private stream: fs.WriteStream | null = null; private initialized = false; private sessionFile: string = ""; + private fd: number | null = null; private ensureDir(): void { if (this.initialized) { @@ -16,20 +20,26 @@ class Logger { } this.initialized = true; try { - // Ensure parent .txtcode dir exists first, then logs subdir - const txtcodeDir = path.join(os.homedir(), ".txtcode"); - if (!fs.existsSync(txtcodeDir)) { - fs.mkdirSync(txtcodeDir, { mode: 0o700, recursive: true }); - } if (!fs.existsSync(LOG_DIR)) { fs.mkdirSync(LOG_DIR, { recursive: true }); } + // On Windows, reset ACLs so 
files are readable by the current user + if (process.platform === "win32") { + try { + execSync(`icacls "${LOG_DIR}" /reset /t /c /q`, { stdio: "ignore" }); + } catch { + // ignore — best effort + } + } + this.sessionFile = path.join(LOG_DIR, `session-${this.fileTimestamp()}.log`); + // Open file with shared read access so other programs (Notepad etc.) can read it + this.fd = fs.openSync( + this.sessionFile, + fs.constants.O_WRONLY | fs.constants.O_CREAT | fs.constants.O_APPEND, + 0o666, + ); this.cleanOldLogs(); - this.stream = fs.createWriteStream(this.sessionFile, { flags: "a" }); - this.stream.on("error", () => { - this.stream = null; - }); } catch { // Logger should never crash the app } @@ -44,12 +54,18 @@ class Logger { continue; } const fullPath = path.join(LOG_DIR, file); - const stat = fs.statSync(fullPath); - if (stat.mtimeMs < cutoff) { - fs.unlinkSync(fullPath); + try { + const stat = fs.statSync(fullPath); + if (stat.mtimeMs < cutoff) { + fs.unlinkSync(fullPath); + } + } catch { + // Skip files that can't be accessed } } - } catch {} + } catch { + // Ignore cleanup errors + } } private fileTimestamp(): string { @@ -71,32 +87,62 @@ class Logger { const h = String(now.getHours()).padStart(2, "0"); const min = String(now.getMinutes()).padStart(2, "0"); const s = String(now.getSeconds()).padStart(2, "0"); - return `${y}-${m}-${d} ${h}:${min}:${s}`; + const ms = String(now.getMilliseconds()).padStart(3, "0"); + return `${y}-${m}-${d} ${h}:${min}:${s}.${ms}`; + } + + private strip(msg: string): string { + return msg.replace(ANSI_REGEX, ""); } private writeToFile(level: string, msg: string): void { this.ensureDir(); - if (this.stream) { + if (this.fd === null) { + return; + } + try { const line = `[${this.timestamp()}] [${level}] ${msg}\n`; - this.stream.write(line); + fs.writeSync(this.fd, line); + } catch { + // If fd became invalid, try to reopen + try { + if (!fs.existsSync(LOG_DIR)) { + fs.mkdirSync(LOG_DIR, { recursive: true }); + } + this.fd = 
fs.openSync( + this.sessionFile, + fs.constants.O_WRONLY | fs.constants.O_CREAT | fs.constants.O_APPEND, + 0o666, + ); + const line = `[${this.timestamp()}] [${level}] ${msg}\n`; + fs.writeSync(this.fd, line); + } catch { + // Logger should never crash the app + } } } info(msg: string): void { console.log(msg); - // eslint-disable-next-line no-control-regex - this.writeToFile("INFO", msg.replace(/\x1b\[[0-9;]*m/g, "")); + this.writeToFile("INFO", this.strip(msg)); } debug(msg: string): void { - // eslint-disable-next-line no-control-regex - this.writeToFile("DEBUG", msg.replace(/\x1b\[[0-9;]*m/g, "")); + this.writeToFile("DEBUG", this.strip(msg)); + } + + warn(msg: string): void { + this.writeToFile("WARN", this.strip(msg)); } error(msg: string, err?: unknown): void { - const errStr = err instanceof Error ? `: ${err.message}` : err ? `: ${String(err)}` : ""; - // eslint-disable-next-line no-control-regex - this.writeToFile("ERROR", msg.replace(/\x1b\[[0-9;]*m/g, "") + errStr); + const errStr = + err instanceof Error + ? `: ${err.message}${err.stack ? `\n${err.stack}` : ""}` + : err + ? `: ${String(err)}` + : ""; + this.writeToFile("ERROR", this.strip(msg) + errStr); } getLogPath(): string {