From f96c0b11a623bcca168413871fdfa7fefee50e51 Mon Sep 17 00:00:00 2001 From: sophatvathana Date: Sat, 22 Nov 2025 22:10:05 +0700 Subject: [PATCH 1/4] feat: enhance tool support parameter definitions for LLM prompt --- examples/tool-call-example.ro | 14 +-- plugins/weather-tool/plugin.toml | 8 ++ plugins/weather-tool/weatherTool.py | 126 +++++++++++++++++++++++ rohas-core/src/parser.rs | 51 +++++++++- rohas-llm/src/providers/openai.rs | 36 ++++++- rohas-runtime/src/executor.rs | 152 ++++++++++++++++++++++++---- 6 files changed, 356 insertions(+), 31 deletions(-) create mode 100644 plugins/weather-tool/plugin.toml create mode 100755 plugins/weather-tool/weatherTool.py diff --git a/examples/tool-call-example.ro b/examples/tool-call-example.ro index 6627f1e..384c615 100644 --- a/examples/tool-call-example.ro +++ b/examples/tool-call-example.ro @@ -1,23 +1,25 @@ -use weatherTool - function buildWeatherPrompt(city): String { - promptText = "What's the weather like in " + city + "?" + promptText = "What's the weather like in " + city + "? Use the weatherTool to get the current weather." return promptText } city = "San Francisco" promptText = buildWeatherPrompt(city) +print "Prompt: " + promptText + prompt promptText tools: [ { name: "weatherTool", - description: "Provides forecast data" + description: "Provides forecast data for a given city. Returns temperature, conditions, and forecast.", + parameters: { + city: "String" + } } ] model: "gpt-4" temperature: 0.2 maxTokens: 200 -call weatherTool(city) - + \ No newline at end of file diff --git a/plugins/weather-tool/plugin.toml b/plugins/weather-tool/plugin.toml new file mode 100644 index 0000000..04cf90a --- /dev/null +++ b/plugins/weather-tool/plugin.toml @@ -0,0 +1,8 @@ +name = "weatherTool" +type = "subprocess" +description = "Weather tool that provides forecast data for a given city" +version = "1.0.0" +path = "weatherTool.py" +interpreter = "python3" +args = [] + diff --git a/plugins/weather-tool/weatherTool.py b/plugins/weather-tool/weatherTool.py new file mode 100755 index 0000000..1c3058c --- /dev/null +++ b/plugins/weather-tool/weatherTool.py @@ -0,0 +1,126 @@ +#!/usr/bin/env python3 + +import json +import sys +from typing import Any, Dict, List, Union +import random + +def parse_value(value: Any) -> Dict[str, Any]: + """Convert Python value to Pronto Value format""" + if value is None: + return {"Null": None} + elif isinstance(value, str): + return {"String": value} + elif isinstance(value, (int, float)): + return {"Number": float(value)} + elif isinstance(value, bool): + return {"Boolean": value} + elif isinstance(value, list): + return {"Array": [parse_value(v) for v in value]} + elif isinstance(value, dict): + return {"Object": {k: parse_value(v) for k, v in value.items()}} + else: + return {"Null": None} + +def extract_value(value: Dict[str, Any]) -> Any: + """Extract Python value from Pronto Value format""" + if "String" in value: + return value["String"] + elif "Number" in value: + return value["Number"] + elif "Boolean" in value: + return value["Boolean"] + elif "Null" in value: + return None + elif "Array" in value: + return [extract_value(v) for v in value["Array"]] + elif "Object" in value: + return {k: extract_value(v) for k, v in value["Object"].items()} + else: + return None + +def get_weather(city: str) -> Dict[str, Any]: + """Get weather information for a given city""" + conditions = ["Sunny", "Cloudy", "Partly Cloudy", "Rainy", "Clear"] + temperatures = { + "San Francisco": (15, 20), + "New York": (10, 18), + "London": 
(8, 15), + "Tokyo": (12, 22), + "Paris": (10, 16), + } + + temp_range = temperatures.get(city, (10, 25)) + temp = round(random.uniform(temp_range[0], temp_range[1]), 1) + condition = random.choice(conditions) + + return { + "city": city, + "temperature": temp, + "condition": condition, + "unit": "Celsius", + "forecast": f"{condition} with a temperature of {temp}°C" + } + +def main(): + try: + input_data = sys.stdin.read() + request = json.loads(input_data) + + if request.get("tool") != "weatherTool": + response = { + "success": False, + "error": f"Unknown tool: {request.get('tool')}", + } + print(json.dumps(response)) + sys.exit(1) + + args = request.get("arguments", []) + + city = None + + if len(args) == 0: + response = { + "success": False, + "error": "weatherTool requires a city argument", + } + print(json.dumps(response)) + sys.exit(1) + + if len(args) == 1 and isinstance(args[0], dict) and "Object" in args[0]: + obj = extract_value(args[0]) + if isinstance(obj, dict) and "city" in obj: + city = obj["city"] + elif isinstance(obj, dict) and len(obj) == 1: + city = list(obj.values())[0] + elif len(args) >= 1: + city = extract_value(args[0]) + + if not city or not isinstance(city, str): + response = { + "success": False, + "error": "weatherTool requires a city argument (String)", + } + print(json.dumps(response)) + sys.exit(1) + + weather_data = get_weather(city) + + response = { + "success": True, + "result": parse_value(weather_data), + } + + print(json.dumps(response)) + + except Exception as e: + response = { + "success": False, + "error": f"Plugin error: {str(e)}", + } + print(json.dumps(response)) + sys.exit(1) + +if __name__ == "__main__": + main() + diff --git a/rohas-core/src/parser.rs b/rohas-core/src/parser.rs index a03ac3e..c9ff1e1 100644 --- a/rohas-core/src/parser.rs +++ b/rohas-core/src/parser.rs @@ -464,10 +464,28 @@ impl Parser { } }) .ok_or(ParseError::InvalidExpression)?; + let parameters = properties + .get("parameters") + .and_then(|e| { + if let Expression::ObjectLiteral { properties: param_props } = e { + let mut params = HashMap::new(); + for (param_name, param_value) in param_props { + if let Expression::Literal(Literal::String(type_str)) = param_value { + if let Ok(param_type) = Self::parse_type_from_string(&type_str) { + params.insert(param_name.clone(), param_type); + } + } + } + Some(params) + } else { + None + } + }) + .unwrap_or_else(HashMap::new); tool_defs.push(ToolDefinition { name, description, - parameters: HashMap::new(), + parameters, }); } else { return Err(ParseError::InvalidExpression); @@ -603,7 +621,6 @@ impl Parser { "maxTokens" => *max_tokens = Some(value), "stream" => *stream = Some(value), "tools" => { - if let Expression::ArrayLiteral { elements } = value { let mut tool_defs = Vec::new(); for elem in elements { @@ -628,10 +645,28 @@ impl Parser { } }) .ok_or(ParseError::InvalidExpression)?; + let parameters = properties + .get("parameters") + .and_then(|e| { + if let Expression::ObjectLiteral { properties: param_props } = e { + let mut params = HashMap::new(); + for (param_name, param_value) in param_props { + if let Expression::Literal(Literal::String(type_str)) = param_value { + if let Ok(param_type) = Self::parse_type_from_string(&type_str) { + params.insert(param_name.clone(), param_type); + } + } + } + Some(params) + } else { + None + } + }) + .unwrap_or_else(HashMap::new); tool_defs.push(ToolDefinition { name, description, - parameters: HashMap::new(), + parameters, }); } else { return Err(ParseError::InvalidExpression); @@ -1222,6 
+1257,16 @@ impl Parser { } } + fn parse_type_from_string(type_str: &str) -> Result { + match type_str { + "String" | "string" => Ok(Type::String), + "Number" | "number" => Ok(Type::Number), + "Boolean" | "boolean" => Ok(Type::Boolean), + "Any" => Ok(Type::Any), + _ => Ok(Type::Named(type_str.to_string())), + } + } + fn peek(&self) -> Option<&Token> { self.tokens.get(self.current).map(|t| &t.token) } diff --git a/rohas-llm/src/providers/openai.rs b/rohas-llm/src/providers/openai.rs index 9482125..430841d 100644 --- a/rohas-llm/src/providers/openai.rs +++ b/rohas-llm/src/providers/openai.rs @@ -45,7 +45,41 @@ impl Provider for OpenAIProvider { } if let Some(tools) = request.tools { - body["tools"] = json!(tools); + let openai_tools: Vec = tools + .into_iter() + .map(|tool| { + let mut properties = serde_json::Map::new(); + let mut required = Vec::new(); + for (param_name, param) in tool.parameters { + let mut prop = serde_json::Map::new(); + prop.insert("type".to_string(), json!(param.param_type)); + if let Some(desc) = param.description { + prop.insert("description".to_string(), json!(desc)); + } + properties.insert(param_name.clone(), json!(prop)); + if param.required.unwrap_or(false) { + required.push(param_name); + } + } + + let parameters = json!({ + "type": "object", + "properties": properties, + "required": required + }); + + json!({ + "type": "function", + "function": { + "name": tool.name, + "description": tool.description, + "parameters": parameters + } + }) + }) + .collect(); + + body["tools"] = json!(openai_tools); body["tool_choice"] = json!("auto"); } diff --git a/rohas-runtime/src/executor.rs b/rohas-runtime/src/executor.rs index a6e1852..cfe41cd 100644 --- a/rohas-runtime/src/executor.rs +++ b/rohas-runtime/src/executor.rs @@ -8,6 +8,7 @@ use rohas_llm::{LLMRequest, LLMRuntime, ProviderConfig}; use std::collections::HashMap; use std::io::{self, Write}; use tokio::runtime::Runtime; +use serde_json; type ToolFunction = Box Result + Send + Sync>; @@ -280,31 +281,44 @@ impl Executor { }); let llm_request = LLMRequest { - prompt: prompt_str, - model: model_str, + prompt: prompt_str.clone(), + model: model_str.clone(), temperature: temp, max_tokens: max_toks, stream: None, tools: tools.clone().map(|tools| { tools .into_iter() - .map(|t| rohas_llm::types::ToolDefinition { - name: t.name, - description: t.description, - parameters: t - .parameters - .into_iter() - .map(|(k, v)| { - ( - k, - rohas_llm::types::ToolParameter { - param_type: format!("{:?}", v), - description: None, - required: Some(true), - }, - ) - }) - .collect(), + .map(|t| { + let mut properties = std::collections::HashMap::new(); + let mut required = Vec::new(); + + for (param_name, param_type) in &t.parameters { + let type_str = match param_type { + rohas_core::ast::Type::String => "string", + rohas_core::ast::Type::Number => "number", + rohas_core::ast::Type::Boolean => "boolean", + rohas_core::ast::Type::Array(_) => "array", + rohas_core::ast::Type::Object(_) => "object", + _ => "string", + }; + + properties.insert( + param_name.clone(), + rohas_llm::types::ToolParameter { + param_type: type_str.to_string(), + description: None, + required: Some(true), + }, + ); + required.push(param_name.clone()); + } + + rohas_llm::types::ToolDefinition { + name: t.name, + description: t.description, + parameters: properties, + } }) .collect() }), @@ -315,11 +329,85 @@ impl Executor { .as_ref() .context("LLM runtime not configured")?; - let response = self + let mut response = self .tokio_runtime 
.block_on(llm_runtime.execute(llm_request))?; - Ok(Some(Value::String(response.content))) + if let Some(tool_calls) = &response.tool_calls { + let mut tool_results_for_llm = Vec::new(); + + for tool_call in tool_calls { + let args = if let serde_json::Value::Object(obj) = &tool_call.arguments { + let mut arg_map = HashMap::new(); + for (key, value) in obj { + arg_map.insert(key.clone(), self.json_to_value(value)?); + } + vec![Value::Object(arg_map)] + } else { + vec![self.json_to_value(&tool_call.arguments)?] + }; + + let result = if let Ok(result) = self.tools.call(&tool_call.name, &args) { + result + } else if let Ok(result) = self.plugins.call(&tool_call.name, &args) { + result + } else { + Value::String(format!("Tool '{}' not found or failed", tool_call.name)) + }; + + let result_json = result.to_json(); + + tool_results_for_llm.push(serde_json::json!({ + "tool_call_id": tool_call.id, + "name": tool_call.name, + "content": serde_json::to_string(&result_json).unwrap_or_else(|_| result.to_string()) + })); + } + + if !tool_results_for_llm.is_empty() { + let follow_up_prompt = if response.content.is_empty() { + "Tool execution completed. Please provide a response based on the tool results.".to_string() + } else { + format!("{}\n\nTool execution completed. Please provide a final response based on the tool results.", response.content) + }; + + let tool_results_text: Vec = tool_results_for_llm + .iter() + .map(|tr| { + format!( + "Tool {} returned: {}", + tr["name"].as_str().unwrap_or("unknown"), + tr["content"].as_str().unwrap_or("") + ) + }) + .collect(); + + let final_prompt = format!( + "{}\n\nTool Results:\n{}", + follow_up_prompt, + tool_results_text.join("\n") + ); + + let follow_up_request = LLMRequest { + prompt: final_prompt, + model: model_str.clone(), + temperature: temp, + max_tokens: max_toks, + stream: None, + tools: None, // Don't send tools again in follow-up + }; + + let final_response = self + .tokio_runtime + .block_on(llm_runtime.execute(follow_up_request))?; + + Ok(Some(Value::String(final_response.content))) + } else { + Ok(Some(Value::String(response.content))) + } + } else { + Ok(Some(Value::String(response.content))) + } } Statement::ToolCallStatement { tool_name, @@ -624,4 +712,26 @@ impl Executor { }, } } + + fn json_to_value(&self, json: &serde_json::Value) -> Result { + match json { + serde_json::Value::Null => Ok(Value::Null), + serde_json::Value::Bool(b) => Ok(Value::Boolean(*b)), + serde_json::Value::Number(n) => Ok(Value::Number( + n.as_f64().ok_or_else(|| anyhow::anyhow!("Invalid number"))?, + )), + serde_json::Value::String(s) => Ok(Value::String(s.clone())), + serde_json::Value::Array(arr) => { + let values: Result> = arr.iter().map(|v| self.json_to_value(v)).collect(); + Ok(Value::Array(values?)) + } + serde_json::Value::Object(obj) => { + let mut map = HashMap::new(); + for (k, v) in obj { + map.insert(k.clone(), self.json_to_value(v)?); + } + Ok(Value::Object(map)) + } + } + } } From 8e31bd0de4982473a3754d48eaf5d2dbf7c8cae0 Mon Sep 17 00:00:00 2001 From: sophatvathana Date: Sat, 22 Nov 2025 23:01:06 +0700 Subject: [PATCH 2/4] feat: implement flow statement enhancements with output handling and flow registration --- examples/flow-news.ro | 14 ++++ rohas-cli/src/main.rs | 7 +- rohas-core/src/ast.rs | 2 + rohas-core/src/lexer.rs | 2 + rohas-core/src/parser.rs | 95 ++++++++++++++++++++++++++- rohas-core/src/token.rs | 2 + rohas-flow/src/engine.rs | 134 +++++++++++++++++++++++++++++++++++++-- 7 files changed, 248 insertions(+), 8 deletions(-) create mode 
100644 examples/flow-news.ro diff --git a/examples/flow-news.ro b/examples/flow-news.ro new file mode 100644 index 0000000..cf71c8b --- /dev/null +++ b/examples/flow-news.ro @@ -0,0 +1,14 @@ +flow fetchNews { + step search { + prompt "Get top 5 tech news today" + output { headlines: string[] } + } +} + +flow summarizeNews { + uses fetchNews + step summarize { + prompt "Summarize these headlines in 3 sentences" + output { summary: string } + } +} diff --git a/rohas-cli/src/main.rs b/rohas-cli/src/main.rs index 5f7eaa7..15357a7 100644 --- a/rohas-cli/src/main.rs +++ b/rohas-cli/src/main.rs @@ -4,7 +4,7 @@ use rohas_core::{Lexer, Parser}; use rohas_flow::FlowEngine; use rohas_llm::{LLMProvider, ProviderConfig}; use rohas_optimizer::TokenSaver; -use rohas_runtime::{Executor, Value}; +use rohas_runtime::Executor; use std::fs; use std::path::PathBuf; @@ -113,6 +113,11 @@ fn main() -> anyhow::Result<()> { } let flow_engine = FlowEngine::new(executor); + for statement in &program.statements { + if let rohas_core::ast::Statement::FlowStatement { name, .. } = statement { + flow_engine.register_flow(name.clone(), statement.clone()); + } + } let mut executor_guard = flow_engine.executor.lock().unwrap(); let mut results = Vec::new(); diff --git a/rohas-core/src/ast.rs b/rohas-core/src/ast.rs index 69d8dd7..98d28dd 100644 --- a/rohas-core/src/ast.rs +++ b/rohas-core/src/ast.rs @@ -83,6 +83,7 @@ pub enum Statement { FlowStatement { name: String, + uses: Vec, steps: Vec, }, StepStatement { @@ -115,6 +116,7 @@ pub struct FlowStep { pub parallel: bool, pub condition: Option, pub retry: Option, + pub output: Option, } #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] diff --git a/rohas-core/src/lexer.rs b/rohas-core/src/lexer.rs index dc74d91..e684d02 100644 --- a/rohas-core/src/lexer.rs +++ b/rohas-core/src/lexer.rs @@ -25,6 +25,7 @@ impl Lexer { keywords.insert("async".to_string(), Token::Async); keywords.insert("await".to_string(), Token::Await); keywords.insert("use".to_string(), Token::Use); + keywords.insert("uses".to_string(), Token::Use); keywords.insert("import".to_string(), Token::Import); keywords.insert("export".to_string(), Token::Export); keywords.insert("type".to_string(), Token::Type); @@ -42,6 +43,7 @@ impl Lexer { keywords.insert("state".to_string(), Token::State); keywords.insert("print".to_string(), Token::Print); keywords.insert("input".to_string(), Token::Input); + keywords.insert("output".to_string(), Token::Output); keywords.insert("flow".to_string(), Token::Flow); keywords.insert("step".to_string(), Token::Step); diff --git a/rohas-core/src/parser.rs b/rohas-core/src/parser.rs index c9ff1e1..eac473e 100644 --- a/rohas-core/src/parser.rs +++ b/rohas-core/src/parser.rs @@ -68,6 +68,7 @@ impl Parser { Some(Token::ToolCall) => self.parse_tool_call_statement(), Some(Token::Call) => self.parse_call_statement(), Some(Token::Print) => self.parse_print_statement(), + Some(Token::Output) => self.parse_output_statement(), Some(Token::Identifier(_)) => { let start_pos = self.current; @@ -254,7 +255,7 @@ impl Parser { fn parse_use_statement(&mut self) -> Result { self.consume(Token::Use)?; self.skip_whitespace(); - let name = self.consume_identifier()?; + let _name = self.consume_identifier()?; self.skip_whitespace(); self.consume_semicolon_optional(); @@ -351,6 +352,53 @@ impl Parser { Ok(Statement::PrintStatement { expression }) } + fn parse_output_statement(&mut self) -> Result { + self.consume(Token::Output)?; + self.skip_whitespace(); + self.consume(Token::LeftBrace)?; + 
self.skip_whitespace(); + let mut properties = HashMap::new(); + while !self.check(Token::RightBrace) { + let key = self.consume_identifier()?; + self.skip_whitespace(); + self.consume(Token::Colon)?; + self.skip_whitespace(); + let value = if let Some(Token::Identifier(_)) = self.peek() { + let start_pos = self.current; + let type_name = self.consume_identifier()?; + let is_array = self.match_token(Token::LeftBracket); + if is_array { + self.consume(Token::RightBracket)?; + if self.check(Token::LeftParen) || self.check(Token::Dot) || self.check(Token::LeftBracket) { + self.current = start_pos; + self.parse_expression()? + } else { + Expression::Literal(Literal::String(format!("{}[]", type_name))) + } + } else { + if self.check(Token::LeftParen) || self.check(Token::Dot) || self.check(Token::LeftBracket) { + self.current = start_pos; + self.parse_expression()? + } else { + Expression::Literal(Literal::String(type_name)) + } + } + } else { + self.parse_expression()? + }; + properties.insert(key, value); + self.skip_whitespace(); + if !self.check(Token::RightBrace) { + self.match_token(Token::Comma); + self.skip_whitespace(); + } + } + self.consume(Token::RightBrace)?; + Ok(Statement::ExpressionStatement { + expression: Expression::ObjectLiteral { properties }, + }) + } + fn parse_parallel_step_statement(&mut self) -> Result { self.consume(Token::Parallel)?; @@ -688,8 +736,30 @@ impl Parser { self.consume(Token::Flow)?; let name = self.consume_identifier()?; self.consume(Token::LeftBrace)?; + let mut uses = Vec::new(); let mut steps = Vec::new(); + while !self.check(Token::RightBrace) { + self.skip_whitespace(); + if self.check(Token::RightBrace) { + break; + } + + if self.match_token(Token::Use) { + let flow_name = self.consume_identifier()?; + uses.push(flow_name); + self.skip_whitespace(); + self.consume_semicolon_optional(); + continue; + } + + if self.check(Token::Step) || self.check(Token::Parallel) { + break; + } + + break; + } + while !self.check(Token::RightBrace) { self.skip_whitespace(); if self.check(Token::RightBrace) { @@ -738,7 +808,25 @@ impl Parser { None }; self.consume(Token::LeftBrace)?; - let statements = self.parse_block()?; + let mut statements = Vec::new(); + let mut output = None; + + while !self.check(Token::RightBrace) && !self.is_at_end() { + self.skip_whitespace(); + if self.check(Token::RightBrace) { + break; + } + + if self.check(Token::Output) { + let output_stmt = self.parse_output_statement()?; + if let Statement::ExpressionStatement { expression } = output_stmt { + output = Some(expression); + } + } else { + statements.push(self.parse_statement()?); + } + } + self.consume(Token::RightBrace)?; steps.push(FlowStep { name: step_name, @@ -746,11 +834,12 @@ impl Parser { parallel, condition, retry, + output, }); } self.consume(Token::RightBrace)?; - Ok(Statement::FlowStatement { name, steps }) + Ok(Statement::FlowStatement { name, uses, steps }) } fn parse_step_statement(&mut self) -> Result { diff --git a/rohas-core/src/token.rs b/rohas-core/src/token.rs index e24eacc..1e3af08 100644 --- a/rohas-core/src/token.rs +++ b/rohas-core/src/token.rs @@ -38,6 +38,7 @@ pub enum Token { State, Print, Input, + Output, Flow, Step, @@ -126,6 +127,7 @@ impl Token { | Token::State | Token::Print | Token::Input + | Token::Output | Token::Flow | Token::Step | Token::Parallel diff --git a/rohas-flow/src/engine.rs b/rohas-flow/src/engine.rs index aad80ab..07e9c61 100644 --- a/rohas-flow/src/engine.rs +++ b/rohas-flow/src/engine.rs @@ -1,10 +1,11 @@ use crate::memory::Memory; use 
crate::state::State; use crate::tools::ToolRegistry; -use anyhow::{Context, Result}; +use anyhow::Result; use rohas_core::ast::*; use rohas_runtime::value::Value; use rohas_runtime::Executor; +use std::collections::HashMap; use std::sync::{Arc, Mutex}; pub struct FlowEngine { @@ -12,6 +13,7 @@ pub struct FlowEngine { state: Arc>, memory: Arc>, tools: Arc>, + flows: Arc>>, } impl FlowEngine { @@ -21,13 +23,33 @@ impl FlowEngine { state: Arc::new(Mutex::new(State::new())), memory: Arc::new(Mutex::new(Memory::new(100))), tools: Arc::new(Mutex::new(ToolRegistry::new())), + flows: Arc::new(Mutex::new(HashMap::new())), } } + pub fn register_flow(&self, name: String, flow: Statement) { + let mut flows = self.flows.lock().unwrap(); + flows.insert(name, flow); + } + pub fn execute_flow(&self, flow_statement: &Statement) -> Result> { - if let Statement::FlowStatement { name, steps } = flow_statement { + if let Statement::FlowStatement { name: _name, uses, steps } = flow_statement { let mut results = Vec::new(); + for used_flow_name in uses { + let used_flow = { + let flows = self.flows.lock().unwrap(); + flows.get(used_flow_name).cloned() + }; + + if let Some(used_flow) = used_flow { + let used_results = self.execute_flow(&used_flow)?; + results.extend(used_results); + } else { + return Err(anyhow::anyhow!("Flow '{}' not found (used by '{}')", used_flow_name, _name)); + } + } + for step in steps { if let Some(condition) = &step.condition { @@ -56,14 +78,34 @@ impl FlowEngine { } } } else { - + let mut step_results = Vec::new(); + let mut last_prompt_result: Option = None; + for stmt in &step.statements { let mut executor = self.executor.lock().unwrap(); let result = executor.execute_statement(stmt)?; drop(executor); if let Some(value) = result { - results.push(value); + if let Value::String(_) = &value { + last_prompt_result = Some(value.clone()); + } + step_results.push(value); + } + } + + if let (Some(output_expr), Some(prompt_result)) = (&step.output, last_prompt_result) { + if let Value::String(response_text) = prompt_result { + if let Expression::ObjectLiteral { properties } = output_expr { + let formatted = self.format_output(&response_text, properties)?; + results.push(formatted); + } else { + results.extend(step_results); + } + } else { + results.extend(step_results); } + } else { + results.extend(step_results); } } @@ -147,4 +189,88 @@ impl FlowEngine { let mut state = self.state.lock().unwrap(); state.set_variable(key, value); } + + fn format_output(&self, response_text: &str, output_schema: &std::collections::HashMap) -> Result { + use std::collections::HashMap; + + let json_value: Result = serde_json::from_str(response_text); + + let mut output_obj = HashMap::new(); + + if let Ok(json) = json_value { + if let serde_json::Value::Object(obj) = json { + for (key, _type_expr) in output_schema { + if let Some(value) = obj.get(key) { + output_obj.insert(key.clone(), self.json_value_to_value(value)?); + } + } + } + } else { + for (key, type_expr) in output_schema { + if let Expression::Literal(Literal::String(type_str)) = type_expr { + if type_str.ends_with("[]") { + if let Some(array_start) = response_text.find('[') { + if let Some(array_end) = response_text[array_start..].find(']') { + let array_str = &response_text[array_start..array_start + array_end + 1]; + if let Ok(json_array) = serde_json::from_str::(array_str) { + if let serde_json::Value::Array(arr) = json_array { + let values: Vec = arr.iter() + .filter_map(|v| self.json_value_to_value(v).ok()) + .collect(); + 
output_obj.insert(key.clone(), Value::Array(values)); + continue; + } + } + } + } + let items: Vec = response_text + .lines() + .filter_map(|line| { + let line = line.trim(); + let cleaned = line + .trim_start_matches(|c: char| c.is_ascii_digit() || c == '.' || c == '-' || c == '*') + .trim(); + if !cleaned.is_empty() && cleaned.len() > 3 { + Some(Value::String(cleaned.to_string())) + } else { + None + } + }) + .take(10) + .collect(); + if !items.is_empty() { + output_obj.insert(key.clone(), Value::Array(items)); + } + } else { + output_obj.insert(key.clone(), Value::String(response_text.to_string())); + } + } + } + } + + Ok(Value::Object(output_obj)) + } + + fn json_value_to_value(&self, json: &serde_json::Value) -> Result { + use std::collections::HashMap; + match json { + serde_json::Value::String(s) => Ok(Value::String(s.clone())), + serde_json::Value::Number(n) => Ok(Value::Number(n.as_f64().unwrap_or(0.0))), + serde_json::Value::Bool(b) => Ok(Value::Boolean(*b)), + serde_json::Value::Null => Ok(Value::Null), + serde_json::Value::Array(arr) => { + let values: Result, _> = arr.iter() + .map(|v| self.json_value_to_value(v)) + .collect(); + Ok(Value::Array(values?)) + }, + serde_json::Value::Object(obj) => { + let mut map = HashMap::new(); + for (k, v) in obj { + map.insert(k.clone(), self.json_value_to_value(v)?); + } + Ok(Value::Object(map)) + }, + } + } } From a869957797a916c5ca655f1556a8866f98fadcf4 Mon Sep 17 00:00:00 2001 From: sophatvathana Date: Sat, 22 Nov 2025 23:09:38 +0700 Subject: [PATCH 3/4] fix: update cache key pattern in GitHub Actions workflows to include nested Cargo.lock files --- .github/workflows/build.yml | 2 +- .github/workflows/release.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 97a3720..b558a91 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -63,7 +63,7 @@ jobs: ~/.cargo/registry/cache/ ~/.cargo/git/db/ target/ - key: ${{ runner.os }}-cargo-${{ matrix.target }}-${{ hashFiles('**/Cargo.lock') }} + key: ${{ runner.os }}-cargo-${{ matrix.target }}-${{ hashFiles('Cargo.lock', '*/Cargo.lock') }} restore-keys: | ${{ runner.os }}-cargo-${{ matrix.target }}- diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 08bfaa4..f5bf4bf 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -66,7 +66,7 @@ jobs: ~/.cargo/registry/cache/ ~/.cargo/git/db/ target/ - key: ${{ runner.os }}-cargo-${{ matrix.target }}-${{ hashFiles('**/Cargo.lock') }} + key: ${{ runner.os }}-cargo-${{ matrix.target }}-${{ hashFiles('Cargo.lock', '*/Cargo.lock') }} restore-keys: | ${{ runner.os }}-cargo-${{ matrix.target }}- From 4d979f593c40f8c360c757314123b0973513d1c5 Mon Sep 17 00:00:00 2001 From: sophatvathana Date: Sat, 22 Nov 2025 23:15:46 +0700 Subject: [PATCH 4/4] fix: update cache key pattern in GitHub Actions workflows to remove nested Cargo.lock references --- .github/workflows/build.yml | 2 +- .github/workflows/release.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index b558a91..64cea8f 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -63,7 +63,7 @@ jobs: ~/.cargo/registry/cache/ ~/.cargo/git/db/ target/ - key: ${{ runner.os }}-cargo-${{ matrix.target }}-${{ hashFiles('Cargo.lock', '*/Cargo.lock') }} + key: ${{ runner.os }}-cargo-${{ matrix.target }}-${{ hashFiles('Cargo.lock') }} restore-keys: | ${{ 
runner.os }}-cargo-${{ matrix.target }}- diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f5bf4bf..13fbb5b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -66,7 +66,7 @@ jobs: ~/.cargo/registry/cache/ ~/.cargo/git/db/ target/ - key: ${{ runner.os }}-cargo-${{ matrix.target }}-${{ hashFiles('Cargo.lock', '*/Cargo.lock') }} + key: ${{ runner.os }}-cargo-${{ matrix.target }}-${{ hashFiles('Cargo.lock') }} restore-keys: | ${{ runner.os }}-cargo-${{ matrix.target }}-
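
---

Note (not part of the patches above): patch 1 introduces a subprocess plugin protocol — the loader sends a JSON request on stdin with a `tool` name and an `arguments` array, and the plugin prints a JSON response with `success` and a `result` in the tagged Value encoding used by `weatherTool.py` (`{"String": ...}`, `{"Number": ...}`, `{"Object": {...}}`, and so on). The sketch below is a hypothetical second plugin (`timeTool`) written against that same protocol to illustrate the request/response shape; the tool name, fields, and the companion `plugin.toml` it would need are assumptions, not part of the patch series.

```python
#!/usr/bin/env python3
# Hypothetical "timeTool" plugin sketch, assuming the subprocess protocol from
# weatherTool.py: a JSON request on stdin ({"tool": ..., "arguments": [...]})
# and a JSON response on stdout ({"success": ..., "result": <tagged Value>}).

import json
import sys
from datetime import datetime, timezone


def to_value(value):
    """Encode a Python value in the tagged Value format used by weatherTool.py."""
    if value is None:
        return {"Null": None}
    # bool is checked before int/float because bool is a subclass of int in Python;
    # otherwise True/False would be encoded as Numbers.
    if isinstance(value, bool):
        return {"Boolean": value}
    if isinstance(value, str):
        return {"String": value}
    if isinstance(value, (int, float)):
        return {"Number": float(value)}
    if isinstance(value, list):
        return {"Array": [to_value(v) for v in value]}
    if isinstance(value, dict):
        return {"Object": {k: to_value(v) for k, v in value.items()}}
    return {"Null": None}


def main():
    request = json.loads(sys.stdin.read())

    if request.get("tool") != "timeTool":
        print(json.dumps({"success": False,
                          "error": f"Unknown tool: {request.get('tool')}"}))
        sys.exit(1)

    # This tool takes no arguments; it simply reports the current UTC time.
    now = datetime.now(timezone.utc)
    result = {"iso": now.isoformat(), "unix": now.timestamp()}

    print(json.dumps({"success": True, "result": to_value(result)}))


if __name__ == "__main__":
    main()
```

If the loader resolves plugins the same way as the weather example, this sketch would presumably be registered with a `plugin.toml` alongside it declaring `name = "timeTool"`, `type = "subprocess"`, `path = "timeTool.py"`, and `interpreter = "python3"`, mirroring `plugins/weather-tool/plugin.toml`.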