
Commit c4ead81

Merge pull request #179 from spicecodecli/new-tests-2
New tests 2
2 parents: 8a0c7fe + 6596ead

32 files changed: +2291 -9 lines

.github/workflows/run_tests.yml

Lines changed: 11 additions & 9 deletions
@@ -21,15 +21,17 @@ jobs:
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
+          # Install the project in editable mode to pick up changes
           pip install -e .
-          pip install pytest typer numpy
-          # Note: Ideally, you should fix your requirements.txt and use:
-          # pip install .
-          # Or at least:
-          # pip install -r requirements.txt
-          # But due to the encoding and importlib issues observed,
-          # installing specific dependencies needed for tests directly for now.
+          # Install test dependencies, including pytest-cov for coverage
+          pip install pytest typer numpy pytest-cov
+          # Note: Ideally, dependencies should be managed via requirements-dev.txt
+          # Consider adding pytest-cov to requirements-dev.txt later.

-      - name: Run tests
+      - name: Run tests with coverage
         run: |
-          python -m pytest tests/analyze/
+          # Run pytest on the entire tests directory
+          # Generate coverage report for specified source directories
+          # Report missing lines directly in the terminal output
+          python -m pytest tests/ --cov=spice --cov=cli --cov=utils --cov=parser --cov=lexers --cov-report=term-missing
+
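The new coverage step is easy to reproduce outside CI. Below is a minimal sketch using pytest's programmatic entry point, mirroring the flags in the workflow above; the run_coverage.py helper name is hypothetical and not part of this commit, and it assumes pytest-cov is installed locally.

# run_coverage.py -- hypothetical local helper, not part of this commit.
import sys

import pytest

# Mirrors the CI invocation: run the whole tests/ tree and print per-file
# coverage with missing line numbers in the terminal (term-missing).
args = [
    "tests/",
    "--cov=spice", "--cov=cli", "--cov=utils", "--cov=parser", "--cov=lexers",
    "--cov-report=term-missing",
]
sys.exit(pytest.main(args))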

tests/cli/__init__.py

Lines changed: 1 addition & 0 deletions (new empty file)

tests/cli/commands/__init__.py

Lines changed: 1 addition & 0 deletions (new empty file)

tests/cli/commands/test_version.py

Lines changed: 92 additions & 0 deletions (new file)

import pytest
import os
from unittest.mock import patch, mock_open, MagicMock
from typer.testing import CliRunner

# Assuming cli.main is the entry point for the typer app.
# Imports may need adjusting if main.py lives elsewhere in the actual structure.
# We test the command function directly for simplicity here,
# avoiding the need for a full typer app setup in this unit test.
from cli.commands.version import version_command

# Dummy translation messages
DUMMY_MESSAGES = {
    "version_info": "SpiceCode Version:",
    "version_not_found": "Version information not found in setup.py",
    "setup_not_found": "Error: setup.py not found.",
    "error": "Error:",
}

# Mock CURRENT_DIR (assuming it's the 'cli' directory for the command)
TEST_CURRENT_DIR = "/home/ubuntu/spicecode/cli"
EXPECTED_SETUP_PATH = "/home/ubuntu/spicecode/setup.py"


@patch("cli.commands.version.get_translation")
@patch("os.path.exists")
def test_version_command_success(mock_exists, mock_get_translation, capsys):
    """Test version command when setup.py exists and contains version."""
    mock_get_translation.return_value = DUMMY_MESSAGES
    mock_exists.return_value = True

    # File content that includes a version line
    file_content = 'name="spicecode",\nversion="1.2.3",\nauthor="test"\n'

    # Use mock_open with the read_data parameter
    with patch("builtins.open", mock_open(read_data=file_content)) as mock_file:
        version_command(LANG_FILE="dummy_lang.txt", CURRENT_DIR=TEST_CURRENT_DIR)

    captured = capsys.readouterr()

    mock_exists.assert_called_once_with(EXPECTED_SETUP_PATH)
    mock_file.assert_called_once_with(EXPECTED_SETUP_PATH, "r")
    assert "SpiceCode Version: 1.2.3" in captured.out


@patch("cli.commands.version.get_translation")
@patch("os.path.exists")
def test_version_command_version_not_in_setup(mock_exists, mock_get_translation, capsys):
    """Test version command when setup.py exists but lacks version info."""
    mock_get_translation.return_value = DUMMY_MESSAGES
    mock_exists.return_value = True

    # File content without a version line
    file_content = 'name="spicecode",\nauthor="test",\ndescription="A CLI tool"\n'

    with patch("builtins.open", mock_open(read_data=file_content)) as mock_file:
        version_command(LANG_FILE="dummy_lang.txt", CURRENT_DIR=TEST_CURRENT_DIR)

    captured = capsys.readouterr()

    mock_exists.assert_called_once_with(EXPECTED_SETUP_PATH)
    mock_file.assert_called_once_with(EXPECTED_SETUP_PATH, "r")
    assert "Version information not found in setup.py" in captured.out


@patch("cli.commands.version.get_translation")
@patch("os.path.exists")
def test_version_command_setup_not_found(mock_exists, mock_get_translation, capsys):
    """Test version command when setup.py does not exist."""
    mock_get_translation.return_value = DUMMY_MESSAGES
    mock_exists.return_value = False

    version_command(LANG_FILE="dummy_lang.txt", CURRENT_DIR=TEST_CURRENT_DIR)

    captured = capsys.readouterr()

    mock_exists.assert_called_once_with(EXPECTED_SETUP_PATH)
    assert "Error: setup.py not found." in captured.out


@patch("cli.commands.version.get_translation")
@patch("os.path.exists")
def test_version_command_read_error(mock_exists, mock_get_translation, capsys):
    """Test version command handles exceptions during file reading."""
    mock_get_translation.return_value = DUMMY_MESSAGES
    mock_exists.return_value = True

    with patch("builtins.open", side_effect=OSError("Permission denied")) as mock_file:
        version_command(LANG_FILE="dummy_lang.txt", CURRENT_DIR=TEST_CURRENT_DIR)

    captured = capsys.readouterr()

    mock_exists.assert_called_once_with(EXPECTED_SETUP_PATH)
    mock_file.assert_called_once_with(EXPECTED_SETUP_PATH, "r")
    assert "Error: Permission denied" in captured.out

tests/lexers/__init__.py

Lines changed: 1 addition & 0 deletions (new empty file)

tests/lexers/test_golexer.py

Lines changed: 196 additions & 0 deletions (new file)

# import pytest
# from lexers.golang.golexer import GoLexer
# from lexers.token import TokenType

# # Helper function to compare token lists, ignoring EOF
# def assert_tokens_equal(actual_tokens, expected_tokens_data):
#     if actual_tokens and actual_tokens[-1].type == TokenType.EOF:
#         actual_tokens = actual_tokens[:-1]

#     assert len(actual_tokens) == len(expected_tokens_data), \
#         f"Expected {len(expected_tokens_data)} tokens, but got {len(actual_tokens)}\nActual: {actual_tokens}\nExpected data: {expected_tokens_data}"

#     for i, (token_type, value) in enumerate(expected_tokens_data):
#         assert actual_tokens[i].type == token_type, f"Token {i} type mismatch: Expected {token_type}, got {actual_tokens[i].type} ({actual_tokens[i].value})"
#         assert actual_tokens[i].value == value, f"Token {i} value mismatch: Expected '{value}', got '{actual_tokens[i].value}'"

# # --- Test Cases ---

# def test_go_empty_input():
#     lexer = GoLexer("")
#     tokens = lexer.tokenize()
#     assert len(tokens) == 1
#     assert tokens[0].type == TokenType.EOF

# def test_go_keywords():
#     code = "package import func var const type struct interface if else for range switch case default return break continue goto fallthrough defer go select chan map make new len cap append copy delete panic recover true false nil"
#     lexer = GoLexer(code)
#     tokens = lexer.tokenize()
#     expected = [
#         (TokenType.KEYWORD, "package"), (TokenType.KEYWORD, "import"), (TokenType.KEYWORD, "func"), (TokenType.KEYWORD, "var"),
#         (TokenType.KEYWORD, "const"), (TokenType.KEYWORD, "type"), (TokenType.KEYWORD, "struct"), (TokenType.KEYWORD, "interface"),
#         (TokenType.KEYWORD, "if"), (TokenType.KEYWORD, "else"), (TokenType.KEYWORD, "for"), (TokenType.KEYWORD, "range"),
#         (TokenType.KEYWORD, "switch"), (TokenType.KEYWORD, "case"), (TokenType.KEYWORD, "default"), (TokenType.KEYWORD, "return"),
#         (TokenType.KEYWORD, "break"), (TokenType.KEYWORD, "continue"), (TokenType.KEYWORD, "goto"), (TokenType.KEYWORD, "fallthrough"),
#         (TokenType.KEYWORD, "defer"), (TokenType.KEYWORD, "go"), (TokenType.KEYWORD, "select"), (TokenType.KEYWORD, "chan"),
#         (TokenType.KEYWORD, "map"), (TokenType.KEYWORD, "make"), (TokenType.KEYWORD, "new"), (TokenType.KEYWORD, "len"),
#         (TokenType.KEYWORD, "cap"), (TokenType.KEYWORD, "append"), (TokenType.KEYWORD, "copy"), (TokenType.KEYWORD, "delete"),
#         (TokenType.KEYWORD, "panic"), (TokenType.KEYWORD, "recover"), (TokenType.KEYWORD, "true"), (TokenType.KEYWORD, "false"),
#         (TokenType.KEYWORD, "nil")
#     ]
#     assert_tokens_equal(tokens, expected)

# def test_go_identifiers():
#     code = "myVar _anotherVar var123 _"
#     lexer = GoLexer(code)
#     tokens = lexer.tokenize()
#     expected = [
#         (TokenType.IDENTIFIER, "myVar"),
#         (TokenType.IDENTIFIER, "_anotherVar"),
#         (TokenType.IDENTIFIER, "var123"),
#         (TokenType.IDENTIFIER, "_"),
#     ]
#     assert_tokens_equal(tokens, expected)

# def test_go_numbers():
#     code = "123 45.67 0.5 1e3 2.5e-2 99"
#     lexer = GoLexer(code)
#     tokens = lexer.tokenize()
#     expected = [
#         (TokenType.NUMBER, "123"),
#         (TokenType.NUMBER, "45.67"),
#         (TokenType.NUMBER, "0.5"),
#         (TokenType.NUMBER, "1e3"),
#         (TokenType.NUMBER, "2.5e-2"),
#         (TokenType.NUMBER, "99"),
#     ]
#     assert_tokens_equal(tokens, expected)

# def test_go_strings():
#     code = "\"hello\" `raw string\nwith newline` \"with \\\"escape\\\"\""
#     lexer = GoLexer(code)
#     tokens = lexer.tokenize()
#     expected = [
#         (TokenType.STRING, "\"hello\""),
#         (TokenType.STRING, "`raw string\nwith newline`"),
#         (TokenType.STRING, "\"with \\\"escape\\\"\""),
#     ]
#     assert_tokens_equal(tokens, expected)

# def test_go_operators():
#     code = "+ - * / % = == != < > <= >= && || ! & | ^ << >> &^ += -= *= /= %= &= |= ^= <<= >>= &^= ++ -- := ... -> <-"
#     lexer = GoLexer(code)
#     tokens = lexer.tokenize()
#     expected = [
#         (TokenType.OPERATOR, "+"), (TokenType.OPERATOR, "-"), (TokenType.OPERATOR, "*"), (TokenType.OPERATOR, "/"), (TokenType.OPERATOR, "%"),
#         (TokenType.OPERATOR, "="), (TokenType.OPERATOR, "=="), (TokenType.OPERATOR, "!="), (TokenType.OPERATOR, "<"), (TokenType.OPERATOR, ">"),
#         (TokenType.OPERATOR, "<="), (TokenType.OPERATOR, ">="), (TokenType.OPERATOR, "&&"), (TokenType.OPERATOR, "||"), (TokenType.OPERATOR, "!"),
#         (TokenType.OPERATOR, "&"), (TokenType.OPERATOR, "|"), (TokenType.OPERATOR, "^"), (TokenType.OPERATOR, "<<"), (TokenType.OPERATOR, ">>"),
#         (TokenType.OPERATOR, "&^"), (TokenType.OPERATOR, "+="), (TokenType.OPERATOR, "-="), (TokenType.OPERATOR, "*="), (TokenType.OPERATOR, "/="),
#         (TokenType.OPERATOR, "%="), (TokenType.OPERATOR, "&="), (TokenType.OPERATOR, "|="), (TokenType.OPERATOR, "^="), (TokenType.OPERATOR, "<<="),
#         (TokenType.OPERATOR, ">>="), (TokenType.OPERATOR, "&^="), (TokenType.OPERATOR, "++"), (TokenType.OPERATOR, "--"), (TokenType.OPERATOR, ":="),
#         (TokenType.OPERATOR, "..."), (TokenType.OPERATOR, "->"), (TokenType.OPERATOR, "<-")
#     ]
#     assert_tokens_equal(tokens, expected)

# def test_go_delimiters():
#     code = "( ) { } [ ] , ; . :"
#     lexer = GoLexer(code)
#     tokens = lexer.tokenize()
#     expected = [
#         (TokenType.DELIMITER, "("), (TokenType.DELIMITER, ")"),
#         (TokenType.DELIMITER, "{"), (TokenType.DELIMITER, "}"),
#         (TokenType.DELIMITER, "["), (TokenType.DELIMITER, "]"),
#         (TokenType.DELIMITER, ","), (TokenType.DELIMITER, ";"),
#         (TokenType.DELIMITER, "."), (TokenType.DELIMITER, ":"),
#     ]
#     assert_tokens_equal(tokens, expected)

# def test_go_comments():
#     code = "// Single line comment\nvar x = 1 // Another comment\n/* Multi-line\n comment */ y := 2"
#     lexer = GoLexer(code)
#     tokens = lexer.tokenize()
#     expected = [
#         (TokenType.COMMENT, "// Single line comment"), (TokenType.NEWLINE, "\\n"),
#         (TokenType.KEYWORD, "var"), (TokenType.IDENTIFIER, "x"), (TokenType.OPERATOR, "="), (TokenType.NUMBER, "1"), (TokenType.COMMENT, "// Another comment"), (TokenType.NEWLINE, "\\n"),
#         (TokenType.COMMENT, "/* Multi-line\n comment */"),
#         (TokenType.IDENTIFIER, "y"), (TokenType.OPERATOR, ":="), (TokenType.NUMBER, "2"),
#     ]
#     assert_tokens_equal(tokens, expected)

# def test_go_mixed_code():
#     code = """
# package main
#
# import "fmt"
#
# func main() {
#     // Declare and initialize
#     message := "Hello, Go!"
#     fmt.Println(message) // Print message
#     num := 10 + 5
#     if num > 10 {
#         return
#     }
# }
# """
#     lexer = GoLexer(code)
#     tokens = lexer.tokenize()
#     expected = [
#         (TokenType.NEWLINE, "\\n"),
#         (TokenType.KEYWORD, "package"), (TokenType.IDENTIFIER, "main"), (TokenType.NEWLINE, "\\n"),
#         (TokenType.NEWLINE, "\\n"),
#         (TokenType.KEYWORD, "import"), (TokenType.STRING, "\"fmt\""), (TokenType.NEWLINE, "\\n"),
#         (TokenType.NEWLINE, "\\n"),
#         (TokenType.KEYWORD, "func"), (TokenType.IDENTIFIER, "main"), (TokenType.DELIMITER, "("), (TokenType.DELIMITER, ")"), (TokenType.DELIMITER, "{"), (TokenType.NEWLINE, "\\n"),
#         (TokenType.COMMENT, "// Declare and initialize"), (TokenType.NEWLINE, "\\n"),
#         (TokenType.IDENTIFIER, "message"), (TokenType.OPERATOR, ":="), (TokenType.STRING, "\"Hello, Go!\""), (TokenType.NEWLINE, "\\n"),
#         (TokenType.IDENTIFIER, "fmt"), (TokenType.DELIMITER, "."), (TokenType.IDENTIFIER, "Println"), (TokenType.DELIMITER, "("), (TokenType.IDENTIFIER, "message"), (TokenType.DELIMITER, ")"), (TokenType.COMMENT, "// Print message"), (TokenType.NEWLINE, "\\n"),
#         (TokenType.IDENTIFIER, "num"), (TokenType.OPERATOR, ":="), (TokenType.NUMBER, "10"), (TokenType.OPERATOR, "+"), (TokenType.NUMBER, "5"), (TokenType.NEWLINE, "\\n"),
#         (TokenType.KEYWORD, "if"), (TokenType.IDENTIFIER, "num"), (TokenType.OPERATOR, ">"), (TokenType.NUMBER, "10"), (TokenType.DELIMITER, "{"), (TokenType.NEWLINE, "\\n"),
#         (TokenType.KEYWORD, "return"), (TokenType.NEWLINE, "\\n"),
#         (TokenType.DELIMITER, "}"), (TokenType.NEWLINE, "\\n"),
#         (TokenType.DELIMITER, "}"), (TokenType.NEWLINE, "\\n"),
#     ]
#     assert_tokens_equal(tokens, expected)

# def test_go_error_character():
#     code = "var a = @;"
#     lexer = GoLexer(code)
#     tokens = lexer.tokenize()
#     expected = [
#         (TokenType.KEYWORD, "var"),
#         (TokenType.IDENTIFIER, "a"),
#         (TokenType.OPERATOR, "="),
#         (TokenType.ERROR, "@"),
#         (TokenType.DELIMITER, ";"),
#     ]
#     assert_tokens_equal(tokens, expected)

# def test_go_unterminated_string():
#     code = "\"unterminated string"
#     lexer = GoLexer(code)
#     tokens = lexer.tokenize()
#     # Go lexer should return the unterminated string as a STRING token
#     expected = [
#         (TokenType.STRING, "\"unterminated string"),
#     ]
#     assert_tokens_equal(tokens, expected)

# def test_go_unterminated_raw_string():
#     code = "`unterminated raw string"
#     lexer = GoLexer(code)
#     tokens = lexer.tokenize()
#     expected = [
#         (TokenType.STRING, "`unterminated raw string"),
#     ]
#     assert_tokens_equal(tokens, expected)

# def test_go_unterminated_comment():
#     code = "/* Unterminated comment"
#     lexer = GoLexer(code)
#     tokens = lexer.tokenize()
#     # Go lexer returns an ERROR token for unterminated multi-line comments
#     assert len(tokens) == 2  # ERROR token + EOF
#     assert tokens[0].type == TokenType.ERROR
#     assert "unterminated comment" in tokens[0].value.lower()
