Merged
2 changes: 1 addition & 1 deletion .github/workflows/build.yml
@@ -63,7 +63,7 @@ jobs:
~/.cargo/registry/cache/
~/.cargo/git/db/
target/
- key: ${{ runner.os }}-cargo-${{ matrix.target }}-${{ hashFiles('**/Cargo.lock') }}
+ key: ${{ runner.os }}-cargo-${{ matrix.target }}-${{ hashFiles('Cargo.lock') }}
restore-keys: |
${{ runner.os }}-cargo-${{ matrix.target }}-

2 changes: 1 addition & 1 deletion .github/workflows/release.yml
@@ -66,7 +66,7 @@ jobs:
~/.cargo/registry/cache/
~/.cargo/git/db/
target/
- key: ${{ runner.os }}-cargo-${{ matrix.target }}-${{ hashFiles('**/Cargo.lock') }}
+ key: ${{ runner.os }}-cargo-${{ matrix.target }}-${{ hashFiles('Cargo.lock') }}
restore-keys: |
${{ runner.os }}-cargo-${{ matrix.target }}-

14 changes: 14 additions & 0 deletions examples/flow-news.ro
@@ -0,0 +1,14 @@
flow fetchNews {
step search {
prompt "Get top 5 tech news today"
output { headlines: string[] }
}
}

flow summarizeNews {
uses fetchNews
step summarize {
prompt "Summarize these headlines in 3 sentences"
output { summary: string }
}
}
7 changes: 6 additions & 1 deletion rohas-cli/src/main.rs
@@ -4,7 +4,7 @@ use rohas_core::{Lexer, Parser};
use rohas_flow::FlowEngine;
use rohas_llm::{LLMProvider, ProviderConfig};
use rohas_optimizer::TokenSaver;
- use rohas_runtime::{Executor, Value};
+ use rohas_runtime::Executor;
use std::fs;
use std::path::PathBuf;

@@ -113,6 +113,11 @@ fn main() -> anyhow::Result<()> {
}

let flow_engine = FlowEngine::new(executor);
for statement in &program.statements {
if let rohas_core::ast::Statement::FlowStatement { name, .. } = statement {
flow_engine.register_flow(name.clone(), statement.clone());
}
}

let mut executor_guard = flow_engine.executor.lock().unwrap();
let mut results = Vec::new();
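The registration loop relies on `FlowEngine::register_flow`, which is not shown in this diff. A minimal sketch of what such a method could look like, assuming the engine keeps registered flow ASTs in a `HashMap` behind a `Mutex` (the field layout here is illustrative, not the crate's actual definition):

```rust
use std::collections::HashMap;
use std::sync::Mutex;

use rohas_core::ast::Statement;
use rohas_runtime::Executor;

// Illustrative sketch only; the real FlowEngine lives in rohas-flow.
pub struct FlowEngine {
    pub executor: Mutex<Executor>,
    flows: Mutex<HashMap<String, Statement>>, // flow name -> parsed FlowStatement
}

impl FlowEngine {
    /// Store a flow's AST so later lookups (including `uses` references) can find it by name.
    /// Takes &self, matching the non-mutable `flow_engine` binding used in main.rs above.
    pub fn register_flow(&self, name: String, statement: Statement) {
        self.flows.lock().unwrap().insert(name, statement);
    }
}
```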
2 changes: 2 additions & 0 deletions rohas-core/src/ast.rs
@@ -83,6 +83,7 @@ pub enum Statement {

FlowStatement {
name: String,
uses: Vec<String>,
steps: Vec<FlowStep>,
},
StepStatement {
@@ -115,6 +116,7 @@ pub struct FlowStep {
pub parallel: bool,
pub condition: Option<Expression>,
pub retry: Option<RetryConfig>,
pub output: Option<Expression>,
}

#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
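With the two new fields in place, a flow that composes another flow carries both its `uses` list and a per-step `output` schema. Roughly what `summarizeNews` from the flow-news.ro example above becomes after parsing, as an illustration (the import path and exact `Expression` shapes are assumptions based on the parser changes below):

```rust
use std::collections::HashMap;

use rohas_core::ast::{Expression, FlowStep, Literal, Statement};

// Illustrative only: the `prompt "..."` statement inside the step is elided.
fn summarize_news_ast() -> Statement {
    let output_schema = Expression::ObjectLiteral {
        properties: HashMap::from([(
            "summary".to_string(),
            Expression::Literal(Literal::String("string".to_string())),
        )]),
    };

    Statement::FlowStatement {
        name: "summarizeNews".to_string(),
        uses: vec!["fetchNews".to_string()],
        steps: vec![FlowStep {
            name: "summarize".to_string(),
            statements: vec![], // prompt statement omitted for brevity
            parallel: false,
            condition: None,
            retry: None,
            output: Some(output_schema),
        }],
    }
}
```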
2 changes: 2 additions & 0 deletions rohas-core/src/lexer.rs
@@ -25,6 +25,7 @@ impl Lexer {
keywords.insert("async".to_string(), Token::Async);
keywords.insert("await".to_string(), Token::Await);
keywords.insert("use".to_string(), Token::Use);
keywords.insert("uses".to_string(), Token::Use);
keywords.insert("import".to_string(), Token::Import);
keywords.insert("export".to_string(), Token::Export);
keywords.insert("type".to_string(), Token::Type);
@@ -42,6 +43,7 @@ impl Lexer {
keywords.insert("state".to_string(), Token::State);
keywords.insert("print".to_string(), Token::Print);
keywords.insert("input".to_string(), Token::Input);
keywords.insert("output".to_string(), Token::Output);

keywords.insert("flow".to_string(), Token::Flow);
keywords.insert("step".to_string(), Token::Step);
95 changes: 92 additions & 3 deletions rohas-core/src/parser.rs
@@ -68,6 +68,7 @@ impl Parser {
Some(Token::ToolCall) => self.parse_tool_call_statement(),
Some(Token::Call) => self.parse_call_statement(),
Some(Token::Print) => self.parse_print_statement(),
Some(Token::Output) => self.parse_output_statement(),
Some(Token::Identifier(_)) => {

let start_pos = self.current;
@@ -254,7 +255,7 @@
fn parse_use_statement(&mut self) -> Result<Statement, ParseError> {
self.consume(Token::Use)?;
self.skip_whitespace();
- let name = self.consume_identifier()?;
+ let _name = self.consume_identifier()?;
self.skip_whitespace();
self.consume_semicolon_optional();

@@ -351,6 +352,53 @@
Ok(Statement::PrintStatement { expression })
}

fn parse_output_statement(&mut self) -> Result<Statement, ParseError> {
self.consume(Token::Output)?;
self.skip_whitespace();
self.consume(Token::LeftBrace)?;
self.skip_whitespace();
let mut properties = HashMap::new();
while !self.check(Token::RightBrace) {
let key = self.consume_identifier()?;
self.skip_whitespace();
self.consume(Token::Colon)?;
self.skip_whitespace();
let value = if let Some(Token::Identifier(_)) = self.peek() {
let start_pos = self.current;
let type_name = self.consume_identifier()?;
let is_array = self.match_token(Token::LeftBracket);
if is_array {
self.consume(Token::RightBracket)?;
if self.check(Token::LeftParen) || self.check(Token::Dot) || self.check(Token::LeftBracket) {
self.current = start_pos;
self.parse_expression()?
} else {
Expression::Literal(Literal::String(format!("{}[]", type_name)))
}
} else {
if self.check(Token::LeftParen) || self.check(Token::Dot) || self.check(Token::LeftBracket) {
self.current = start_pos;
self.parse_expression()?
} else {
Expression::Literal(Literal::String(type_name))
}
}
} else {
self.parse_expression()?
};
properties.insert(key, value);
self.skip_whitespace();
if !self.check(Token::RightBrace) {
self.match_token(Token::Comma);
self.skip_whitespace();
}
}
self.consume(Token::RightBrace)?;
Ok(Statement::ExpressionStatement {
expression: Expression::ObjectLiteral { properties },
})
}
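The identifier branch above is a small lookahead: after consuming the identifier (and an optional `[]`), the parser only rewinds to `start_pos` and re-parses a full expression when the next token is `(`, `.`, or `[`, i.e. when the value is really a call, member access, or index rather than a bare type name. So a schema entry such as `headlines: string[]` is stored as a plain string literal; a hedged illustration, assuming `Expression` and `Literal` are importable from `rohas_core::ast`:

```rust
use rohas_core::ast::{Expression, Literal};

// What the value side of `headlines: string[]` becomes under the branch above.
fn headlines_schema_value() -> Expression {
    Expression::Literal(Literal::String("string[]".to_string()))
}
```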

fn parse_parallel_step_statement(&mut self) -> Result<Statement, ParseError> {

self.consume(Token::Parallel)?;
@@ -688,8 +736,30 @@
self.consume(Token::Flow)?;
let name = self.consume_identifier()?;
self.consume(Token::LeftBrace)?;
let mut uses = Vec::new();
let mut steps = Vec::new();

while !self.check(Token::RightBrace) {
self.skip_whitespace();
if self.check(Token::RightBrace) {
break;
}

if self.match_token(Token::Use) {
let flow_name = self.consume_identifier()?;
uses.push(flow_name);
self.skip_whitespace();
self.consume_semicolon_optional();
continue;
}

if self.check(Token::Step) || self.check(Token::Parallel) {
break;
}

break;
}

while !self.check(Token::RightBrace) {
self.skip_whitespace();
if self.check(Token::RightBrace) {
@@ -738,19 +808,38 @@
None
};
self.consume(Token::LeftBrace)?;
- let statements = self.parse_block()?;
let mut statements = Vec::new();
let mut output = None;

while !self.check(Token::RightBrace) && !self.is_at_end() {
self.skip_whitespace();
if self.check(Token::RightBrace) {
break;
}

if self.check(Token::Output) {
let output_stmt = self.parse_output_statement()?;
if let Statement::ExpressionStatement { expression } = output_stmt {
output = Some(expression);
}
} else {
statements.push(self.parse_statement()?);
}
}

self.consume(Token::RightBrace)?;
steps.push(FlowStep {
name: step_name,
statements,
parallel,
condition,
retry,
output,
});
}

self.consume(Token::RightBrace)?;
- Ok(Statement::FlowStatement { name, steps })
+ Ok(Statement::FlowStatement { name, uses, steps })
}

fn parse_step_statement(&mut self) -> Result<Statement, ParseError> {
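A quick way to see the new grammar end to end is to run the flow-news.ro example through the front end and check the `uses` list. The `Lexer`/`Parser` entry points below (`new`, `tokenize`, `parse`) are assumed method names for illustration and are not guaranteed by this diff:

```rust
use rohas_core::{ast::Statement, Lexer, Parser};

// Hedged sketch: constructor and method names on Lexer/Parser are assumptions.
fn parses_uses_clause() {
    let source = r#"
        flow summarizeNews {
            uses fetchNews
            step summarize {
                prompt "Summarize these headlines in 3 sentences"
                output { summary: string }
            }
        }
    "#;

    let tokens = Lexer::new(source).tokenize();                 // assumed API
    let program = Parser::new(tokens).parse().expect("parse");  // assumed API

    let uses = program
        .statements
        .iter()
        .find_map(|s| match s {
            Statement::FlowStatement { name, uses, .. } if name == "summarizeNews" => Some(uses),
            _ => None,
        })
        .expect("flow should be parsed");

    assert_eq!(uses, &vec!["fetchNews".to_string()]);
}
```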
2 changes: 2 additions & 0 deletions rohas-core/src/token.rs
@@ -38,6 +38,7 @@ pub enum Token {
State,
Print,
Input,
Output,

Flow,
Step,
@@ -126,6 +127,7 @@ impl Token {
| Token::State
| Token::Print
| Token::Input
| Token::Output
| Token::Flow
| Token::Step
| Token::Parallel