diff --git a/examples/infinit-loop.ro b/examples/infinit-loop.ro
new file mode 100644
index 0000000..5a38c57
--- /dev/null
+++ b/examples/infinit-loop.ro
@@ -0,0 +1,4 @@
+// comment just to trigger the GitHub code quality check
+while true {
+    print "Hello, world!"
+}
diff --git a/rohas-core/src/lexer.rs b/rohas-core/src/lexer.rs
index 6d4fce2..dc74d91 100644
--- a/rohas-core/src/lexer.rs
+++ b/rohas-core/src/lexer.rs
@@ -162,14 +162,12 @@ impl Lexer {
             '/' => {
                 self.advance();
                 if self.match_char('/') {
-
                     let mut comment = String::new();
                     while !self.is_at_end() && self.peek() != Some('\n') {
                         comment.push(self.advance().unwrap());
                     }
                     Ok(Token::Comment(comment))
                 } else if self.match_char('*') {
-
                     let mut comment = String::new();
                     while !self.is_at_end() {
                         if self.match_char('*') && self.peek() == Some('/') {
@@ -288,7 +286,6 @@ impl Lexer {
             };

             if is_dollar_brace {
-
                 if !current_text.is_empty() {
                     parts.push(crate::token::TokenTemplatePart::Text(current_text.clone()));
                     current_text.clear();
@@ -299,7 +296,6 @@ impl Lexer {
                 expression_depth = 1;
                 current_text.clear();
             } else if is_simple_brace {
-
                 if !current_text.is_empty() {
                     parts.push(crate::token::TokenTemplatePart::Text(current_text.clone()));
                     current_text.clear();
@@ -334,7 +330,6 @@ impl Lexer {
             } else if ch == '}' {
                 expression_depth -= 1;
                 if expression_depth == 0 {
-
                     let expr_text = current_text.clone();
                     parts.push(crate::token::TokenTemplatePart::Expression(expr_text));
                     current_text.clear();
@@ -345,7 +340,6 @@ impl Lexer {
                     self.advance();
                 }
             } else if ch == quote {
-
                 return Err(LexerError::UnterminatedString(self.line, self.column));
             } else {
                 current_text.push(ch);
@@ -459,3 +453,554 @@ pub enum LexerError {
     #[error("Unterminated string at line {0}, column {1}")]
     UnterminatedString(usize, usize),
 }
+
+#[cfg(test)]
+mod tests {
+    use crate::TokenTemplatePart;
+
+    use super::*;
+
+    #[test]
+    fn test_lexer() {
+        let mut lexer = Lexer::new("let x = 1;");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens.len(), 6);
+        assert_eq!(tokens[0].token, Token::Let);
+        assert_eq!(tokens[1].token, Token::Identifier("x".to_string()));
+        assert_eq!(tokens[2].token, Token::Assign);
+        assert_eq!(tokens[3].token, Token::Number("1".to_string()));
+        assert_eq!(tokens[4].token, Token::Semicolon);
+        assert_eq!(tokens[5].token, Token::Eof);
+    }
+
+    #[test]
+    fn test_lexer_with_string() {
+        let mut lexer = Lexer::new("let x = \"Hello, world!\";");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens.len(), 6);
+        assert_eq!(tokens[0].token, Token::Let);
+        assert_eq!(tokens[1].token, Token::Identifier("x".to_string()));
+        assert_eq!(tokens[2].token, Token::Assign);
+        assert_eq!(tokens[3].token, Token::String("Hello, world!".to_string()));
+        assert_eq!(tokens[4].token, Token::Semicolon);
+        assert_eq!(tokens[5].token, Token::Eof);
+        assert_eq!(tokens[0].span.start, 0);
+        assert_eq!(tokens[0].span.end, 3);
+        assert_eq!(tokens[0].span.line, 1);
+        assert_eq!(tokens[0].span.column, 1);
+        assert_eq!(tokens[1].span.start, 4);
+        assert_eq!(tokens[1].span.end, 5);
+        assert_eq!(tokens[1].span.line, 1);
+        assert_eq!(tokens[1].span.column, 5);
+    }
+
+    #[test]
+    fn test_lexer_with_number() {
+        let mut lexer = Lexer::new("let x = 123;");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens.len(), 6);
+        assert_eq!(tokens[0].token, Token::Let);
+        assert_eq!(tokens[1].token, Token::Identifier("x".to_string()));
+        assert_eq!(tokens[2].token, Token::Assign);
+        assert_eq!(tokens[3].token, Token::Number("123".to_string()));
+        assert_eq!(tokens[4].token, Token::Semicolon);
+        assert_eq!(tokens[5].token, Token::Eof);
+    }
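+
+    // Editor's sketch (not part of the original change): a small helper that
+    // collapses the tokenize-then-compare pattern repeated throughout this
+    // module. It relies only on the `Lexer::new`/`tokenize` API and the
+    // `.token` field already exercised above, and checks the leading tokens
+    // of the stream against `expected`.
+    fn assert_token_stream(source: &str, expected: &[Token]) {
+        let mut lexer = Lexer::new(source);
+        let tokens = lexer.tokenize().unwrap();
+        assert!(tokens.len() >= expected.len());
+        for (actual, expected) in tokens.iter().zip(expected.iter()) {
+            assert_eq!(&actual.token, expected);
+        }
+    }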
+
+    #[test]
+    fn test_lexer_with_boolean() {
+        let mut lexer = Lexer::new("let x = true;");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens.len(), 6);
+        assert_eq!(tokens[0].token, Token::Let);
+        assert_eq!(tokens[1].token, Token::Identifier("x".to_string()));
+        assert_eq!(tokens[2].token, Token::Assign);
+        assert_eq!(tokens[3].token, Token::Boolean(true));
+        assert_eq!(tokens[4].token, Token::Semicolon);
+        assert_eq!(tokens[5].token, Token::Eof);
+    }
+
+    #[test]
+    fn test_lexer_with_null() {
+        let mut lexer = Lexer::new("let x = null;");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens.len(), 6);
+        assert_eq!(tokens[0].token, Token::Let);
+        assert_eq!(tokens[1].token, Token::Identifier("x".to_string()));
+        assert_eq!(tokens[2].token, Token::Assign);
+        assert_eq!(tokens[3].token, Token::Null);
+        assert_eq!(tokens[4].token, Token::Semicolon);
+        assert_eq!(tokens[5].token, Token::Eof);
+    }
+
+    #[test]
+    fn test_lexer_with_string_template() {
+        let mut lexer = Lexer::new("let x = \"Hello, ${name}!\";");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens.len(), 6);
+        assert_eq!(tokens[0].token, Token::Let);
+        assert_eq!(tokens[1].token, Token::Identifier("x".to_string()));
+        assert_eq!(tokens[2].token, Token::Assign);
+        assert_eq!(
+            tokens[3].token,
+            Token::TemplateString(vec![
+                TokenTemplatePart::Text("Hello, ".to_string()),
+                TokenTemplatePart::Expression("name".to_string()),
+                TokenTemplatePart::Text("!".to_string())
+            ])
+        );
+        assert_eq!(tokens[4].token, Token::Semicolon);
+        assert_eq!(tokens[5].token, Token::Eof);
+    }
+
+    #[test]
+    fn test_lexer_with_float_number() {
+        let mut lexer = Lexer::new("let x = 3.14;");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens.len(), 6);
+        assert_eq!(tokens[0].token, Token::Let);
+        assert_eq!(tokens[1].token, Token::Identifier("x".to_string()));
+        assert_eq!(tokens[2].token, Token::Assign);
+        assert_eq!(tokens[3].token, Token::Number("3.14".to_string()));
+        assert_eq!(tokens[4].token, Token::Semicolon);
+        assert_eq!(tokens[5].token, Token::Eof);
+    }
+
+    #[test]
+    fn test_lexer_with_operators() {
+        let mut lexer = Lexer::new("x + y - z * w / v % u");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::Identifier("x".to_string()));
+        assert_eq!(tokens[1].token, Token::Plus);
+        assert_eq!(tokens[2].token, Token::Identifier("y".to_string()));
+        assert_eq!(tokens[3].token, Token::Minus);
+        assert_eq!(tokens[4].token, Token::Identifier("z".to_string()));
+        assert_eq!(tokens[5].token, Token::Multiply);
+        assert_eq!(tokens[6].token, Token::Identifier("w".to_string()));
+        assert_eq!(tokens[7].token, Token::Divide);
+        assert_eq!(tokens[8].token, Token::Identifier("v".to_string()));
+        assert_eq!(tokens[9].token, Token::Modulo);
+        assert_eq!(tokens[10].token, Token::Identifier("u".to_string()));
+    }
+
+    #[test]
+    fn test_lexer_with_comparison_operators() {
+        let mut lexer = Lexer::new("x == y != z < w <= v > u >= t");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::Identifier("x".to_string()));
+        assert_eq!(tokens[1].token, Token::Equal);
+        assert_eq!(tokens[2].token, Token::Identifier("y".to_string()));
+        assert_eq!(tokens[3].token, Token::NotEqual);
+        assert_eq!(tokens[4].token, Token::Identifier("z".to_string()));
+        assert_eq!(tokens[5].token, Token::LessThan);
+        assert_eq!(tokens[6].token, Token::Identifier("w".to_string()));
+        assert_eq!(tokens[7].token, Token::LessThanOrEqual);
+        assert_eq!(tokens[8].token, Token::Identifier("v".to_string()));
+        assert_eq!(tokens[9].token, Token::GreaterThan);
+        assert_eq!(tokens[10].token, Token::Identifier("u".to_string()));
+        assert_eq!(tokens[11].token, Token::GreaterThanOrEqual);
+        assert_eq!(tokens[12].token, Token::Identifier("t".to_string()));
+    }
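+
+    // Example use of the assert_token_stream sketch above; the expectations
+    // mirror tokens already asserted in test_lexer_with_operators.
+    #[test]
+    fn test_lexer_stream_helper_usage() {
+        assert_token_stream(
+            "x + y",
+            &[
+                Token::Identifier("x".to_string()),
+                Token::Plus,
+                Token::Identifier("y".to_string()),
+                Token::Eof,
+            ],
+        );
+    }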
+
+    #[test]
+    fn test_lexer_with_logical_operators() {
+        let mut lexer = Lexer::new("x && y || z !w");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::Identifier("x".to_string()));
+        assert_eq!(tokens[1].token, Token::And);
+        assert_eq!(tokens[2].token, Token::Identifier("y".to_string()));
+        assert_eq!(tokens[3].token, Token::Or);
+        assert_eq!(tokens[4].token, Token::Identifier("z".to_string()));
+        assert_eq!(tokens[5].token, Token::Not);
+        assert_eq!(tokens[6].token, Token::Identifier("w".to_string()));
+    }
+
+    #[test]
+    fn test_lexer_with_assign_operators() {
+        let mut lexer = Lexer::new("x += y -= z *= w /= v");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::Identifier("x".to_string()));
+        assert_eq!(tokens[1].token, Token::PlusAssign);
+        assert_eq!(tokens[2].token, Token::Identifier("y".to_string()));
+        assert_eq!(tokens[3].token, Token::MinusAssign);
+        assert_eq!(tokens[4].token, Token::Identifier("z".to_string()));
+        assert_eq!(tokens[5].token, Token::MultiplyAssign);
+        assert_eq!(tokens[6].token, Token::Identifier("w".to_string()));
+        assert_eq!(tokens[7].token, Token::DivideAssign);
+        assert_eq!(tokens[8].token, Token::Identifier("v".to_string()));
+    }
+
+    #[test]
+    fn test_lexer_with_arrow_operator() {
+        let mut lexer = Lexer::new("x->y");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::Identifier("x".to_string()));
+        assert_eq!(tokens[1].token, Token::Arrow);
+        assert_eq!(tokens[2].token, Token::Identifier("y".to_string()));
+    }
+
+    #[test]
+    fn test_lexer_with_pipe_operator() {
+        let mut lexer = Lexer::new("x | y || z");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::Identifier("x".to_string()));
+        assert_eq!(tokens[1].token, Token::Pipe);
+        assert_eq!(tokens[2].token, Token::Identifier("y".to_string()));
+        assert_eq!(tokens[3].token, Token::Or);
+        assert_eq!(tokens[4].token, Token::Identifier("z".to_string()));
+    }
+
+    #[test]
+    fn test_lexer_with_single_line_comment() {
+        let mut lexer = Lexer::new("let x = 1; // This is a comment");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::Let);
+        assert_eq!(tokens[1].token, Token::Identifier("x".to_string()));
+        assert_eq!(tokens[2].token, Token::Assign);
+        assert_eq!(tokens[3].token, Token::Number("1".to_string()));
+        assert_eq!(tokens[4].token, Token::Semicolon);
+        assert_eq!(tokens[5].token, Token::Comment(" This is a comment".to_string()));
+    }
+
+    #[test]
+    fn test_lexer_with_multi_line_comment() {
+        let mut lexer = Lexer::new("let x = 1; /* This is a\nmulti-line comment */");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::Let);
+        assert_eq!(tokens[1].token, Token::Identifier("x".to_string()));
+        assert_eq!(tokens[2].token, Token::Assign);
+        assert_eq!(tokens[3].token, Token::Number("1".to_string()));
+        assert_eq!(tokens[4].token, Token::Semicolon);
+        assert!(matches!(tokens[5].token, Token::Comment(_)));
+    }
+
+    #[test]
+    fn test_lexer_with_keywords() {
+        let mut lexer = Lexer::new("const if else while for return function async await");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::Const);
+        assert_eq!(tokens[1].token, Token::If);
+        assert_eq!(tokens[2].token, Token::Else);
+        assert_eq!(tokens[3].token, Token::While);
+        assert_eq!(tokens[4].token, Token::For);
+        assert_eq!(tokens[5].token, Token::Return);
+        assert_eq!(tokens[6].token, Token::Function);
+        assert_eq!(tokens[7].token, Token::Async);
+        assert_eq!(tokens[8].token, Token::Await);
+    }
+
+    #[test]
+    fn test_lexer_with_more_keywords() {
+        let mut lexer = Lexer::new("use import export type interface record");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::Use);
+        assert_eq!(tokens[1].token, Token::Import);
+        assert_eq!(tokens[2].token, Token::Export);
+        assert_eq!(tokens[3].token, Token::Type);
+        assert_eq!(tokens[4].token, Token::Interface);
+        assert_eq!(tokens[5].token, Token::Record);
+    }
+
+    #[test]
+    fn test_lexer_with_special_keywords() {
+        let mut lexer = Lexer::new("prompt model temperature maxTokens stream tool toolCall call");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::Prompt);
+        assert_eq!(tokens[1].token, Token::Model);
+        assert_eq!(tokens[2].token, Token::Temperature);
+        assert_eq!(tokens[3].token, Token::MaxTokens);
+        assert_eq!(tokens[4].token, Token::Stream);
+        assert_eq!(tokens[5].token, Token::Tool);
+        assert_eq!(tokens[6].token, Token::ToolCall);
+        assert_eq!(tokens[7].token, Token::Call);
+    }
+
+    #[test]
+    fn test_lexer_with_flow_keywords() {
+        let mut lexer = Lexer::new("flow step parallel condition loop retry");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::Flow);
+        assert_eq!(tokens[1].token, Token::Step);
+        assert_eq!(tokens[2].token, Token::Parallel);
+        assert_eq!(tokens[3].token, Token::Condition);
+        assert_eq!(tokens[4].token, Token::Loop);
+        assert_eq!(tokens[5].token, Token::Retry);
+    }
+
+    #[test]
+    fn test_lexer_with_fn_keyword() {
+        let mut lexer = Lexer::new("fn test() {}");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::Function);
+        assert_eq!(tokens[1].token, Token::Identifier("test".to_string()));
+        assert_eq!(tokens[2].token, Token::LeftParen);
+        assert_eq!(tokens[3].token, Token::RightParen);
+        assert_eq!(tokens[4].token, Token::LeftBrace);
+        assert_eq!(tokens[5].token, Token::RightBrace);
+    }
+
+    #[test]
+    fn test_lexer_with_identifiers_underscore() {
+        let mut lexer = Lexer::new("_test test_var var_123");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::Identifier("_test".to_string()));
+        assert_eq!(tokens[1].token, Token::Identifier("test_var".to_string()));
+        assert_eq!(tokens[2].token, Token::Identifier("var_123".to_string()));
+    }
+
+    #[test]
+    fn test_lexer_with_string_escape_sequences() {
+        let mut lexer = Lexer::new("let x = \"Hello\\nWorld\\tTab\\rReturn\\\"Quote\\'Single\";");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::Let);
+        assert_eq!(tokens[1].token, Token::Identifier("x".to_string()));
+        assert_eq!(tokens[2].token, Token::Assign);
+        if let Token::String(s) = &tokens[3].token {
+            assert!(s.contains('\n'));
+            assert!(s.contains('\t'));
+            assert!(s.contains('\r'));
+            assert!(s.contains('"'));
+            assert!(s.contains('\''));
+        } else {
+            panic!("Expected String token");
+        }
+    }
+
+    #[test]
+    fn test_lexer_with_single_quote_string() {
+        let mut lexer = Lexer::new("let x = 'Hello, world!';");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::Let);
+        assert_eq!(tokens[1].token, Token::Identifier("x".to_string()));
+        assert_eq!(tokens[2].token, Token::Assign);
+        assert_eq!(tokens[3].token, Token::String("Hello, world!".to_string()));
+        assert_eq!(tokens[4].token, Token::Semicolon);
+    }
+
+    #[test]
+    fn test_lexer_with_empty_string() {
+        let mut lexer = Lexer::new("let x = \"\";");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::Let);
+        assert_eq!(tokens[1].token, Token::Identifier("x".to_string()));
+        assert_eq!(tokens[2].token, Token::Assign);
+        assert_eq!(tokens[3].token, Token::String(String::new()));
+        assert_eq!(tokens[4].token, Token::Semicolon);
+    }
+
+    #[test]
+    fn test_lexer_with_template_string_simple_brace() {
+        let mut lexer = Lexer::new("let x = \"Hello, {name}!\";");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::Let);
+        assert_eq!(tokens[1].token, Token::Identifier("x".to_string()));
+        assert_eq!(tokens[2].token, Token::Assign);
+        assert_eq!(
+            tokens[3].token,
+            Token::TemplateString(vec![
+                TokenTemplatePart::Text("Hello, ".to_string()),
+                TokenTemplatePart::Expression("name".to_string()),
+                TokenTemplatePart::Text("!".to_string())
+            ])
+        );
+    }
+
+    #[test]
+    fn test_lexer_with_template_string_nested_braces() {
+        let mut lexer = Lexer::new("let x = \"Hello, ${outer{inner}}!\";");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::Let);
+        assert_eq!(tokens[1].token, Token::Identifier("x".to_string()));
+        assert_eq!(tokens[2].token, Token::Assign);
+        if let Token::TemplateString(parts) = &tokens[3].token {
+            assert_eq!(parts.len(), 3);
+            assert_eq!(parts[0], TokenTemplatePart::Text("Hello, ".to_string()));
+            if let TokenTemplatePart::Expression(expr) = &parts[1] {
+                assert!(expr.contains("outer{inner}"));
+            }
+            assert_eq!(parts[2], TokenTemplatePart::Text("!".to_string()));
+        } else {
+            panic!("Expected TemplateString token");
+        }
+    }
+
+    #[test]
+    fn test_lexer_with_template_string_multiple_expressions() {
+        let mut lexer = Lexer::new("let x = \"${first} and ${second}\";");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::Let);
+        if let Token::TemplateString(parts) = &tokens[3].token {
+            assert_eq!(parts.len(), 3);
+            assert_eq!(parts[0], TokenTemplatePart::Expression("first".to_string()));
+            assert_eq!(parts[1], TokenTemplatePart::Text(" and ".to_string()));
+            assert_eq!(parts[2], TokenTemplatePart::Expression("second".to_string()));
+        } else {
+            panic!("Expected TemplateString token");
+        }
+    }
+
+    #[test]
+    fn test_lexer_with_punctuation() {
+        let mut lexer = Lexer::new("(){}[],;:.");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::LeftParen);
+        assert_eq!(tokens[1].token, Token::RightParen);
+        assert_eq!(tokens[2].token, Token::LeftBrace);
+        assert_eq!(tokens[3].token, Token::RightBrace);
+        assert_eq!(tokens[4].token, Token::LeftBracket);
+        assert_eq!(tokens[5].token, Token::RightBracket);
+        assert_eq!(tokens[6].token, Token::Comma);
+        assert_eq!(tokens[7].token, Token::Semicolon);
+        assert_eq!(tokens[8].token, Token::Colon);
+        assert_eq!(tokens[9].token, Token::Dot);
+    }
+
+    #[test]
+    fn test_lexer_with_question_mark() {
+        let mut lexer = Lexer::new("x ? y : z");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::Identifier("x".to_string()));
+        assert_eq!(tokens[1].token, Token::QuestionMark);
+        assert_eq!(tokens[2].token, Token::Identifier("y".to_string()));
+        assert_eq!(tokens[3].token, Token::Colon);
+        assert_eq!(tokens[4].token, Token::Identifier("z".to_string()));
+    }
+
+    #[test]
+    fn test_lexer_with_newlines() {
+        let mut lexer = Lexer::new("let x = 1;\nlet y = 2;");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::Let);
+        assert_eq!(tokens[4].token, Token::Semicolon);
+        assert_eq!(tokens[5].token, Token::Newline);
+        assert_eq!(tokens[6].token, Token::Let);
+    }
+
+    #[test]
+    fn test_lexer_with_complex_expression() {
+        let mut lexer = Lexer::new("let result = (x + y) * z / 2;");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::Let);
+        assert_eq!(tokens[1].token, Token::Identifier("result".to_string()));
+        assert_eq!(tokens[2].token, Token::Assign);
+        assert_eq!(tokens[3].token, Token::LeftParen);
+        assert_eq!(tokens[4].token, Token::Identifier("x".to_string()));
+        assert_eq!(tokens[5].token, Token::Plus);
+        assert_eq!(tokens[6].token, Token::Identifier("y".to_string()));
+        assert_eq!(tokens[7].token, Token::RightParen);
+        assert_eq!(tokens[8].token, Token::Multiply);
+        assert_eq!(tokens[9].token, Token::Identifier("z".to_string()));
+        assert_eq!(tokens[10].token, Token::Divide);
+        assert_eq!(tokens[11].token, Token::Number("2".to_string()));
+        assert_eq!(tokens[12].token, Token::Semicolon);
+    }
+
+    #[test]
+    fn test_lexer_with_unterminated_string() {
+        let mut lexer = Lexer::new("let x = \"Hello");
+        let result = lexer.tokenize();
+        assert!(result.is_err());
+        if let Err(LexerError::UnterminatedString(_, _)) = result {
+            // Expected error
+        } else {
+            panic!("Expected UnterminatedString error");
+        }
+    }
+
+    #[test]
+    fn test_lexer_with_unexpected_character() {
+        let mut lexer = Lexer::new("let x = @invalid;");
+        let result = lexer.tokenize();
+        assert!(result.is_err());
+        if let Err(LexerError::UnexpectedCharacter('@', _, _)) = result {
+            // Expected error
+        } else {
+            panic!("Expected UnexpectedCharacter error");
+        }
+    }
+
+    #[test]
+    fn test_lexer_with_whitespace_handling() {
+        let mut lexer = Lexer::new("let x = 1;");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::Let);
+        assert_eq!(tokens[1].token, Token::Identifier("x".to_string()));
+        assert_eq!(tokens[2].token, Token::Assign);
+        assert_eq!(tokens[3].token, Token::Number("1".to_string()));
+        assert_eq!(tokens[4].token, Token::Semicolon);
+    }
+
+    #[test]
+    fn test_lexer_with_memory_state_keywords() {
+        let mut lexer = Lexer::new("memory state print input");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::Memory);
+        assert_eq!(tokens[1].token, Token::State);
+        assert_eq!(tokens[2].token, Token::Print);
+        assert_eq!(tokens[3].token, Token::Input);
+    }
+
+    #[test]
+    fn test_lexer_with_number_starting_with_zero() {
+        let mut lexer = Lexer::new("let x = 0123;");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[3].token, Token::Number("0123".to_string()));
+    }
+
+    #[test]
+    fn test_lexer_with_float_without_fractional_part() {
+        let mut lexer = Lexer::new("let x = 123.;");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[3].token, Token::Number("123.".to_string()));
+    }
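+
+    // Editor's addition (a hedged sketch, not in the original change): the
+    // parser now accepts `while` without parentheses around the condition,
+    // as in examples/infinit-loop.ro. This test only checks the token stream
+    // the lexer hands to the parser for that form, using tokens already
+    // asserted elsewhere in this module.
+    #[test]
+    fn test_lexer_with_parenthesis_free_while() {
+        let mut lexer = Lexer::new("while true { print \"Hello, world!\" }");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::While);
+        assert_eq!(tokens[1].token, Token::Boolean(true));
+        assert_eq!(tokens[2].token, Token::LeftBrace);
+        assert_eq!(tokens[3].token, Token::Print);
+        assert_eq!(tokens[4].token, Token::String("Hello, world!".to_string()));
+        assert_eq!(tokens[5].token, Token::RightBrace);
+        assert_eq!(tokens[6].token, Token::Eof);
+    }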
+
+    #[test]
+    fn test_lexer_with_plus_plus_operator() {
+        // Note: the lexer currently treats ++ as just Plus, not a separate increment operator.
+        let mut lexer = Lexer::new("x++");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::Identifier("x".to_string()));
+        assert_eq!(tokens[1].token, Token::Plus);
+    }
+
+    #[test]
+    fn test_lexer_with_string_containing_quote() {
+        let mut lexer = Lexer::new("let x = \"He said \\\"Hello\\\"\";");
+        let tokens = lexer.tokenize().unwrap();
+        if let Token::String(s) = &tokens[3].token {
+            assert!(s.contains('"'));
+        } else {
+            panic!("Expected String token");
+        }
+    }
+
+    #[test]
+    fn test_lexer_with_template_string_only_expression() {
+        let mut lexer = Lexer::new("let x = \"${name}\";");
+        let tokens = lexer.tokenize().unwrap();
+        if let Token::TemplateString(parts) = &tokens[3].token {
+            assert_eq!(parts.len(), 1);
+            assert_eq!(parts[0], TokenTemplatePart::Expression("name".to_string()));
+        } else {
+            panic!("Expected TemplateString token");
+        }
+    }
+
+    #[test]
+    fn test_lexer_with_template_string_only_text() {
+        let mut lexer = Lexer::new("let x = \"Hello\";");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[3].token, Token::String("Hello".to_string()));
+    }
+
+    #[test]
+    fn test_lexer_with_identifier_not_keyword() {
+        // Test that identifiers that are not keywords are parsed correctly
+        let mut lexer = Lexer::new("let variable = identifier;");
+        let tokens = lexer.tokenize().unwrap();
+        assert_eq!(tokens[0].token, Token::Let);
+        assert_eq!(tokens[1].token, Token::Identifier("variable".to_string()));
+        assert_eq!(tokens[2].token, Token::Assign);
+        assert_eq!(tokens[3].token, Token::Identifier("identifier".to_string()));
+    }
+}
diff --git a/rohas-core/src/parser.rs b/rohas-core/src/parser.rs
index d4d80cc..a03ac3e 100644
--- a/rohas-core/src/parser.rs
+++ b/rohas-core/src/parser.rs
@@ -160,7 +160,6 @@ impl Parser {
             self.consume(Token::RightParen)?;
             cond
         } else {
-
             self.parse_expression()?
         };
         self.skip_whitespace();
@@ -184,12 +183,16 @@ impl Parser {

     fn parse_while_statement(&mut self) -> Result {
         self.consume(Token::While)?;
-        self.consume(Token::LeftParen)?;
+        // Parentheses around the condition are no longer required.
+        self.skip_whitespace();
         let condition = self.parse_expression()?;
-        self.consume(Token::RightParen)?;
+        self.skip_whitespace();
         self.consume(Token::LeftBrace)?;
+        self.skip_whitespace();
         let body = self.parse_block()?;
+        self.skip_whitespace();
         self.consume(Token::RightBrace)?;
+        self.skip_whitespace();

         Ok(Statement::WhileStatement { condition, body })
     }
diff --git a/rohas-web/Cargo.toml b/rohas-web/Cargo.toml
new file mode 100644
index 0000000..9b70b8e
--- /dev/null
+++ b/rohas-web/Cargo.toml
@@ -0,0 +1,17 @@
+[package]
+name = "rohas-web"
+version = { workspace = true }
+edition = { workspace = true }
+authors = { workspace = true }
+license = { workspace = true }
+
+[dependencies]
+rohas-core = { path = "../rohas-core" }
+rohas-llm = { path = "../rohas-llm" }
+rohas-plugins = { path = "../rohas-plugins" }
+tokio = { workspace = true }
+serde = { workspace = true }
+serde_json = { workspace = true }
+thiserror = { workspace = true }
+anyhow = { workspace = true }
+reqwest = { workspace = true }