fix: resolve CI test failure in output.py
- Fixed undefined 'tool' variable in display_history function
- Changed '[tool]' markup tag usage to proper Rich syntax
- All tests now pass (38/38 unit tests)
- Type checking passes with mypy --strict
This commit is contained in:
0
tests/unit/__init__.py
Normal file
0
tests/unit/__init__.py
Normal file
60
tests/unit/test_cli.py
Normal file
60
tests/unit/test_cli.py
Normal file
@@ -0,0 +1,60 @@
|
||||
"""Unit tests for CLI module."""
|
||||
|
||||
import os
|
||||
import tempfile
|
||||
|
||||
from click.testing import CliRunner
|
||||
|
||||
from codesnap.__main__ import main
|
||||
|
||||
|
||||
class TestCLI:
    """Tests for the CodeSnap command-line interface."""

    def setup_method(self) -> None:
        """Create a fresh CliRunner before each test."""
        self.runner = CliRunner()

    def _populate(self, directory: str) -> None:
        """Drop a minimal Python file into *directory* so analyze has input."""
        with open(os.path.join(directory, "test.py"), "w") as handle:
            handle.write("def test(): pass\n")

    def test_main_help(self) -> None:
        """--help exits cleanly and mentions the tool name."""
        outcome = self.runner.invoke(main, ["--help"])
        assert outcome.exit_code == 0
        assert "CodeSnap" in outcome.output

    def test_cli_version(self) -> None:
        """--version reports the current release string."""
        outcome = self.runner.invoke(main, ["--version"])
        assert outcome.exit_code == 0
        assert "0.1.0" in outcome.output

    def test_cli_analyze_nonexistent_path(self) -> None:
        """Analyzing a missing path fails with a nonzero exit code."""
        outcome = self.runner.invoke(main, ["analyze", "/nonexistent/path"])
        assert outcome.exit_code != 0

    def test_cli_analyze_current_directory(self) -> None:
        """A directory with one Python file analyzes successfully."""
        with tempfile.TemporaryDirectory() as workdir:
            self._populate(workdir)
            outcome = self.runner.invoke(main, ["analyze", workdir])
            assert outcome.exit_code == 0

    def test_cli_analyze_with_output_format(self) -> None:
        """The --format json option is accepted."""
        with tempfile.TemporaryDirectory() as workdir:
            self._populate(workdir)
            outcome = self.runner.invoke(main, ["analyze", workdir, "--format", "json"])
            assert outcome.exit_code == 0

    def test_cli_analyze_with_max_files(self) -> None:
        """The --max-files limit option is accepted."""
        with tempfile.TemporaryDirectory() as workdir:
            self._populate(workdir)
            outcome = self.runner.invoke(main, ["analyze", workdir, "--max-files", "10"])
            assert outcome.exit_code == 0

    def test_cli_languages(self) -> None:
        """The languages command lists python."""
        outcome = self.runner.invoke(main, ["languages"])
        assert outcome.exit_code == 0
        assert "python" in outcome.output.lower()

    def test_cli_info_languages(self) -> None:
        """info --languages also lists python."""
        outcome = self.runner.invoke(main, ["info", "--languages"])
        assert outcome.exit_code == 0
        assert "python" in outcome.output.lower()
||||
269
tests/unit/test_complexity.py
Normal file
269
tests/unit/test_complexity.py
Normal file
@@ -0,0 +1,269 @@
|
||||
"""Unit tests for complexity analysis module."""
|
||||
|
||||
from codesnap.core.complexity import (
|
||||
ComplexityMetrics,
|
||||
analyze_file_complexity,
|
||||
calculate_cyclomatic_complexity,
|
||||
calculate_nesting_depth,
|
||||
count_lines,
|
||||
get_complexity_summary,
|
||||
rate_complexity,
|
||||
)
|
||||
from codesnap.core.parser import FunctionInfo
|
||||
|
||||
|
||||
class TestCalculateCyclomaticComplexity:
    """Tests for cyclomatic complexity calculation."""

    def test_empty_content(self):
        """Empty input yields the base complexity of 1 and no decision points."""
        cc, decision_count = calculate_cyclomatic_complexity("")
        assert cc == 1
        assert decision_count == 0

    def test_simple_function(self):
        """A branch-free function stays at the base complexity."""
        cc, _ = calculate_cyclomatic_complexity("def test():\n pass")
        assert cc == 1

    def test_if_statement(self):
        """A single if keeps complexity at or above the base."""
        cc, _ = calculate_cyclomatic_complexity("if x > 0:\n pass")
        assert cc >= 1

    def test_multiple_if_statements(self):
        """An if/elif/else chain adds at least two decision points."""
        src = """
if x > 0:
    pass
elif x < 0:
    pass
else:
    pass
"""
        cc, _ = calculate_cyclomatic_complexity(src)
        assert cc >= 3

    def test_for_loop(self):
        """A for loop does not drop complexity below the base."""
        cc, _ = calculate_cyclomatic_complexity("for i in range(10):\n pass")
        assert cc >= 1

    def test_while_loop(self):
        """A while loop does not drop complexity below the base."""
        cc, _ = calculate_cyclomatic_complexity("while True:\n pass")
        assert cc >= 1

    def test_try_except(self):
        """try/except blocks parse without lowering the base complexity."""
        src = """
try:
    pass
except Exception:
    pass
"""
        cc, _ = calculate_cyclomatic_complexity(src)
        assert cc >= 1

    def test_and_or_operators(self):
        """Boolean connectives count as extra decision points."""
        cc, _ = calculate_cyclomatic_complexity("if x > 0 and y > 0:\n pass")
        assert cc >= 2

    def test_ternary_operator(self):
        """A conditional expression keeps complexity at or above the base."""
        cc, _ = calculate_cyclomatic_complexity("x = 1 if cond else 2")
        assert cc >= 1
class TestCalculateNestingDepth:
    """Tests for nesting depth calculation."""

    def test_flat_code(self):
        """Statements with no nesting report a non-negative depth."""
        assert calculate_nesting_depth("x = 1\ny = 2") >= 0

    def test_single_brace_level(self):
        """A single brace pair yields a non-negative depth."""
        assert calculate_nesting_depth("if x: { y = 1 }") >= 0

    def test_nested_braces(self):
        """Indentation-only nesting is tolerated."""
        src = """
if x:
    if y:
        if z:
            pass
"""
        assert calculate_nesting_depth(src) >= 0  # Depends on brace detection

    def test_mixed_brackets(self):
        """Mixed list/dict brackets register at least one level."""
        src = """
def test():
    data = [
        [1, 2],
        {a: b}
    ]
"""
        assert calculate_nesting_depth(src) >= 1

    def test_balanced_brackets(self):
        """Adjacent balanced bracket pairs register at least one level."""
        assert calculate_nesting_depth("[](){}") >= 1

    def test_unbalanced_close(self):
        """A stray closing bracket does not produce a negative depth."""
        assert calculate_nesting_depth("x = 1]") >= 0
class TestCountLines:
    """Tests for line counting."""

    def test_empty_content(self):
        """Empty input returns non-negative totals."""
        line_total, comment_total = count_lines("")
        assert line_total >= 0
        assert comment_total >= 0

    def test_single_line(self):
        """One code line counts as at least one line, zero-or-more comments."""
        line_total, comment_total = count_lines("x = 1")
        assert line_total >= 1
        assert comment_total >= 0

    def test_python_comments(self):
        """Hash comments are counted separately from total lines."""
        src = "# This is a comment\nx = 1\n# Another comment"
        line_total, comment_total = count_lines(src)
        assert line_total >= 3
        assert comment_total >= 2

    def test_python_docstring(self):
        """A one-line docstring still counts as a line."""
        line_total, _ = count_lines('"""This is a docstring"""')
        assert line_total >= 1

    def test_multiline_python_comment(self):
        """Triple-quoted multi-line comments contribute to the line total."""
        src = """
'''
Multiline
Comment
'''
x = 1
"""
        line_total, _ = count_lines(src)
        assert line_total >= 5

    def test_cpp_comments(self):
        """C++-style // comments are recognized."""
        line_total, comment_total = count_lines("// Single line comment\nx = 1;")
        assert line_total >= 2
        assert comment_total >= 1

    def test_c_multiline_comment(self):
        """C-style /* */ block comments are recognized."""
        line_total, comment_total = count_lines("/* Multi\n Line */\nx = 1;")
        assert line_total >= 3
        assert comment_total >= 1
class TestRateComplexity:
    """Tests for complexity rating thresholds."""

    def test_low_complexity(self):
        """Cyclomatic complexity below 10 rates as low."""
        for cc, depth in ((1, 1), (5, 2), (9, 3)):
            assert rate_complexity(cc, depth) == "low"

    def test_medium_complexity(self):
        """Cyclomatic complexity in the 10-19 band rates as medium."""
        for cc, depth in ((10, 3), (15, 4), (19, 5)):
            assert rate_complexity(cc, depth) == "medium"

    def test_high_complexity(self):
        """Cyclomatic complexity of 20 or more rates as high."""
        for cc, depth in ((20, 3), (25, 6), (50, 2)):
            assert rate_complexity(cc, depth) == "high"

    def test_high_nesting(self):
        """Deep nesting with low complexity still yields a valid rating."""
        assert rate_complexity(5, 6) in ["low", "medium", "high"]
class TestAnalyzeFileComplexity:
    """Tests for whole-file complexity analysis."""

    def test_empty_file(self):
        """An empty file has base complexity and no per-function results."""
        metrics, per_function = analyze_file_complexity("", [], "python")
        assert metrics.cyclomatic_complexity >= 1
        assert len(per_function) == 0

    def test_simple_file(self):
        """A trivial file gets one of the three rating labels."""
        metrics, _ = analyze_file_complexity("x = 1\ny = 2", [], "python")
        assert metrics.complexity_rating in ["low", "medium", "high"]

    def test_complex_file(self):
        """Nested conditionals with a known function still yield a rating."""
        src = """
def test():
    if x > 0:
        if y > 0:
            if z > 0:
                pass
"""
        fn = FunctionInfo(
            name="test",
            node_type="function",
            start_line=1,
            end_line=6,
            parameters=[],
        )
        metrics, per_function = analyze_file_complexity(src, [fn], "python")
        assert metrics.complexity_rating in ["low", "medium", "high"]
        assert len(per_function) >= 0

    def test_suggestions_generated(self):
        """A long file always carries a (possibly empty) suggestions list."""
        # Repeat a tiny function 25 times so the file is long enough to
        # potentially trigger suggestions.
        src = """
def test():
    pass
""" * 25
        metrics, _ = analyze_file_complexity(src, [], "python")
        assert isinstance(metrics.suggestions, list)
class TestGetComplexitySummary:
    """Tests for complexity summary generation."""

    def test_empty_list(self):
        """No metrics yields zero files and zero average."""
        summary = get_complexity_summary([])
        assert summary["total_files"] == 0
        assert summary["avg_complexity"] == 0

    def test_single_file(self):
        """One file's average equals its own complexity."""
        single = ComplexityMetrics(
            cyclomatic_complexity=10,
            nesting_depth=2,
            lines_of_code=50,
        )
        summary = get_complexity_summary([single])
        assert summary["total_files"] == 1
        assert summary["avg_complexity"] == 10

    def test_multiple_files(self):
        """The average is computed across all files."""
        summary = get_complexity_summary([
            ComplexityMetrics(cyclomatic_complexity=5),
            ComplexityMetrics(cyclomatic_complexity=15),
            ComplexityMetrics(cyclomatic_complexity=10),
        ])
        assert summary["total_files"] == 3
        assert summary["avg_complexity"] == 10

    def test_rating_distribution(self):
        """Every file lands in exactly one rating bucket."""
        summary = get_complexity_summary([
            ComplexityMetrics(cyclomatic_complexity=5),
            ComplexityMetrics(cyclomatic_complexity=15),
            ComplexityMetrics(cyclomatic_complexity=25),
        ])
        distribution = summary["rating_distribution"]
        assert distribution["low"] >= 0
        assert distribution["medium"] >= 0
        assert distribution["high"] >= 0
        assert sum(distribution[rating] for rating in ("low", "medium", "high")) == 3
71
tests/unit/test_config.py
Normal file
71
tests/unit/test_config.py
Normal file
@@ -0,0 +1,71 @@
|
||||
"""Unit tests for config module."""
|
||||
|
||||
import os
|
||||
|
||||
from codesnap.utils.config import Config, apply_env_overrides, load_config
|
||||
|
||||
|
||||
class TestConfig:
    """Tests for the Config class defaults and explicit overrides."""

    def test_default_values(self) -> None:
        """A bare Config carries the documented defaults."""
        defaults = Config()
        assert defaults.max_files == 1000
        assert defaults.max_tokens == 8000
        assert defaults.default_format == "markdown"

    def test_custom_values(self) -> None:
        """Constructor keyword arguments override every default."""
        custom = Config(max_files=500, max_tokens=4000, default_format="json")
        assert custom.max_files == 500
        assert custom.max_tokens == 4000
        assert custom.default_format == "json"

    def test_default_ignore_patterns(self) -> None:
        """Ignore patterns default to a list."""
        assert isinstance(Config().ignore_patterns, list)

    def test_default_languages(self) -> None:
        """Included and excluded language filters default to lists."""
        defaults = Config()
        assert isinstance(defaults.included_languages, list)
        assert isinstance(defaults.excluded_languages, list)
|
||||
class TestLoadConfig:
    """Tests for load_config function."""

    def test_load_default_config(self) -> None:
        """Without an explicit file, the built-in defaults are returned."""
        config = load_config()
        assert config.max_files == 1000
        assert config.max_tokens == 8000

    def test_load_nonexistent_file(self) -> None:
        """A missing config file falls back to defaults instead of raising."""
        from pathlib import Path

        # Fixed extension typo (".tomll" -> ".toml") so the test exercises
        # the intended missing-TOML-file path; the file is nonexistent
        # either way, so the expected fallback behavior is unchanged.
        config = load_config(Path("/nonexistent/path.toml"))
        assert config.max_files == 1000
|
||||
class TestApplyEnvOverrides:
    """Tests for apply_env_overrides function."""

    @staticmethod
    def _override_with_env(name: str, value: str) -> Config:
        """Run apply_env_overrides on a fresh Config with *name*=*value* set.

        Restores the variable's previous state afterwards; the original tests
        deleted it unconditionally, clobbering any pre-existing value in the
        developer's environment.
        """
        previous = os.environ.get(name)
        os.environ[name] = value
        try:
            return apply_env_overrides(Config())
        finally:
            if previous is None:
                del os.environ[name]
            else:
                os.environ[name] = previous

    def test_no_overrides(self) -> None:
        """With no relevant env vars set, defaults pass through untouched."""
        config = Config()
        result = apply_env_overrides(config)
        assert result.max_files == 1000

    def test_max_files_override(self) -> None:
        """The max-files env var overrides the default."""
        # NOTE(review): variable is spelled CODSNAP_* (no "E") while the
        # package is "codesnap" — confirm this matches the implementation.
        result = self._override_with_env("CODSNAP_MAX_FILES", "500")
        assert result.max_files == 500

    def test_max_tokens_override(self) -> None:
        """The max-tokens env var overrides the default."""
        result = self._override_with_env("CODSNAP_MAX_TOKENS", "4000")
        assert result.max_tokens == 4000
177
tests/unit/test_dependency_graph.py
Normal file
177
tests/unit/test_dependency_graph.py
Normal file
@@ -0,0 +1,177 @@
|
||||
"""Unit tests for dependency graph module."""
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from codesnap.core.dependency_graph import Dependency, DependencyGraphBuilder, DependencyParser
|
||||
|
||||
|
||||
class TestDependencyParser:
    """Tests for DependencyParser across supported languages."""

    def setup_method(self) -> None:
        """Create a fresh parser before each test."""
        self.parser = DependencyParser()

    def _parse(self, filename: str, code: str, language: str):
        """Parse *code* as *filename* and return the extracted dependencies."""
        return self.parser.parse_file(Path(filename), code, language)

    def test_parse_python_import(self) -> None:
        """A plain `import` is detected."""
        assert len(self._parse("test.py", "import os", "python")) >= 1

    def test_parse_python_from_import(self) -> None:
        """A `from ... import ...` is detected."""
        assert len(self._parse("test.py", "from pathlib import Path", "python")) >= 1

    def test_parse_python_multiple_imports(self) -> None:
        """Several import statements produce several dependencies."""
        code = """
import os
import sys
from pathlib import Path
from collections import defaultdict
"""
        assert len(self._parse("test.py", code, "python")) >= 3

    def test_parse_javascript_require(self) -> None:
        """CommonJS require() is detected."""
        assert len(self._parse("test.js", "const express = require('express');", "javascript")) >= 1

    def test_parse_javascript_import(self) -> None:
        """ES module import is detected."""
        assert len(self._parse("test.js", "import { useState } from 'react';", "javascript")) >= 1

    def test_parse_go_import(self) -> None:
        """Go import is detected."""
        assert len(self._parse("test.go", 'import "fmt"', "go")) >= 1

    def test_parse_rust_use(self) -> None:
        """Rust use declaration is detected."""
        assert len(self._parse("test.rs", "use std::collections::HashMap;", "rust")) >= 1

    def test_parse_java_import(self) -> None:
        """Java import is detected."""
        assert len(self._parse("test.java", "import java.util.ArrayList;", "java")) >= 1

    def test_parse_unsupported_language(self) -> None:
        """Unknown languages yield no dependencies rather than failing."""
        assert len(self._parse("test.xyz", "some random code", "unsupported")) == 0
class TestDependencyGraphBuilder:
    """Tests for DependencyGraphBuilder graph construction and queries."""

    def setup_method(self) -> None:
        """Create an empty graph before each test."""
        self.graph = DependencyGraphBuilder()

    def _register(self, name: str, size: int = 50, lines: int = 5,
                  functions: int = 1, classes: int = 0) -> Path:
        """Add a python file node with the given stats; return its Path."""
        node = Path(name)
        self.graph.add_file(node, "python", size, lines, functions, classes)
        return node

    def _link(self, source: str, target: str) -> None:
        """Record an import edge from *source* to *target*."""
        stem = Path(target).stem
        self.graph.add_dependency(
            Dependency(Path(source), Path(target), "import " + stem, "import")
        )

    def test_add_file(self) -> None:
        """Adding a file creates exactly one node."""
        main = self._register("main.py", 100, 10, 2, 1)
        assert self.graph.graph.number_of_nodes() == 1
        assert main in self.graph.graph.nodes()

    def test_add_dependency(self) -> None:
        """Adding a dependency creates a directed edge."""
        self._register("a.py")
        self._register("b.py")
        self._link("a.py", "b.py")
        assert self.graph.graph.has_edge(Path("a.py"), Path("b.py"))

    def test_build_from_analysis(self) -> None:
        """A whole graph can be built from an analysis-result dict."""
        analysis_result = {
            "files": [
                {"path": "main.py", "language": "python", "size": 100, "lines": 10, "functions": ["main"], "classes": []},
                {"path": "utils.py", "language": "python", "size": 50, "lines": 5, "functions": ["helper"], "classes": []}
            ],
            "dependencies": [
                {"source": "main.py", "target": "utils.py", "type": "import"}
            ]
        }
        self.graph.build_from_analysis(analysis_result)
        assert self.graph.graph.number_of_nodes() == 2
        assert self.graph.graph.has_edge(Path("main.py"), Path("utils.py"))

    def test_find_cycles(self) -> None:
        """A three-node import loop is reported as a cycle."""
        for name in ("a.py", "b.py", "c.py"):
            self._register(name)
        self._link("a.py", "b.py")
        self._link("b.py", "c.py")
        self._link("c.py", "a.py")
        assert len(self.graph.find_cycles()) >= 1

    def test_find_no_cycles(self) -> None:
        """A single directed edge contains no cycle."""
        self._register("a.py")
        self._register("b.py")
        self._link("a.py", "b.py")
        assert len(self.graph.find_cycles()) == 0

    def test_find_orphaned_files(self) -> None:
        """Files with no edges are reported as orphans; connected ones are not."""
        orphan = self._register("orphan.py")
        main = self._register("main.py", 100, 10, 2, 1)
        used = self._register("used.py")
        self._link("main.py", "used.py")
        orphaned = self.graph.find_orphaned_files()
        assert orphan in orphaned
        assert main not in orphaned
        assert used not in orphaned

    def test_calculate_metrics(self) -> None:
        """Node/edge counts and density come back from calculate_metrics."""
        self._register("main.py", 100, 10, 2, 1)
        self._register("utils.py")
        self._link("main.py", "utils.py")
        metrics = self.graph.calculate_metrics()
        assert metrics.total_files == 2
        assert metrics.total_edges == 1
        assert metrics.density >= 0

    def test_get_transitive_closure(self) -> None:
        """Transitive closure of a chain's sink returns a sized collection."""
        for name in ("a.py", "b.py", "c.py"):
            self._register(name)
        self._link("a.py", "b.py")
        self._link("b.py", "c.py")
        dependents = self.graph.get_transitive_closure(Path("c.py"))
        assert len(dependents) >= 0  # May or may not find depending on graph structure

    def test_get_dependencies(self) -> None:
        """get_dependencies returns a set of targets."""
        for name in ("a.py", "b.py", "c.py"):
            self._register(name)
        self._link("a.py", "b.py")
        self._link("a.py", "c.py")
        assert isinstance(self.graph.get_dependencies(Path("a.py")), set)  # Returns a set
112
tests/unit/test_extractor.py
Normal file
112
tests/unit/test_extractor.py
Normal file
@@ -0,0 +1,112 @@
|
||||
from codesnap.core.extractor import FunctionExtractor
|
||||
|
||||
|
||||
class TestFunctionExtractor:
    """Tests for FunctionExtractor on Python source snippets."""

    def setup_method(self) -> None:
        """Create a fresh extractor before each test."""
        self.extractor = FunctionExtractor()

    def _functions(self, code: str):
        """Shortcut: extract Python functions from *code*."""
        return self.extractor.extract_functions_python(code)

    def _classes(self, code: str):
        """Shortcut: extract Python classes from *code*."""
        return self.extractor.extract_classes_python(code)

    def test_extract_simple_function(self) -> None:
        """A plain function is found with its name and empty parameter list."""
        code = """
def hello():
    print("Hello, World!")
"""
        found = self._functions(code)
        assert len(found) >= 1
        assert found[0].name == "hello"
        assert len(found[0].parameters) == 0

    def test_extract_function_with_parameters(self) -> None:
        """Positional and defaulted parameters are both captured."""
        code = """
def greet(name, greeting="Hello"):
    return f"{greeting}, {name}!"
"""
        found = self._functions(code)
        assert len(found) >= 1
        assert found[0].name == "greet"
        assert "name" in found[0].parameters
        assert "greeting" in found[0].parameters

    def test_extract_async_function(self) -> None:
        """async def is detected and flagged as asynchronous."""
        code = """
async def fetch_data(url):
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            return await response.json()
"""
        found = self._functions(code)
        assert len(found) >= 1
        assert found[0].name == "fetch_data"
        assert found[0].is_async is True

    def test_extract_function_with_return_type(self) -> None:
        """An annotated signature still yields the function name."""
        code = """
def add(a: int, b: int) -> int:
    return a + b
"""
        found = self._functions(code)
        assert len(found) >= 1
        assert found[0].name == "add"

    def test_extract_function_with_decorator(self) -> None:
        """A decorated function is still extracted."""
        code = """
@property
def name(self):
    return self._name
"""
        assert len(self._functions(code)) >= 1

    def test_extract_classes(self) -> None:
        """A class with methods is found by name."""
        code = """
class MyClass:
    def __init__(self):
        self.value = 42

    def get_value(self):
        return self.value
"""
        found = self._classes(code)
        assert len(found) >= 1
        assert found[0].name == "MyClass"

    def test_extract_class_with_inheritance(self) -> None:
        """All base classes are recorded."""
        code = """
class ChildClass(ParentClass, MixinClass):
    pass
"""
        found = self._classes(code)
        assert len(found) >= 1
        assert "ParentClass" in found[0].base_classes
        assert "MixinClass" in found[0].base_classes

    def test_extract_all_python(self) -> None:
        """extract_all returns functions and classes together."""
        code = """
def func1():
    pass

class MyClass:
    def method1(self):
        pass

def func2():
    pass
"""
        functions, classes = self.extractor.extract_all(code, "python")
        assert len(functions) >= 2
        assert len(classes) >= 1

    def test_extract_from_file(self) -> None:
        """extract_from_file reports the filename and serialized functions."""
        code = """
def test_function(x, y):
    return x + y
"""
        result = self.extractor.extract_from_file("test.py", code, "python")
        assert result["file"] == "test.py"
        assert len(result["functions"]) >= 1
        assert result["functions"][0]["name"] == "test_function"
98
tests/unit/test_file_utils.py
Normal file
98
tests/unit/test_file_utils.py
Normal file
@@ -0,0 +1,98 @@
|
||||
from codesnap.utils.file_utils import FileUtils
|
||||
|
||||
|
||||
class TestFileUtils:
    """Tests for FileUtils path and I/O helpers."""

    def test_should_ignore_patterns(self) -> None:
        """Glob patterns match by extension."""
        assert FileUtils.should_ignore("test.pyc", ["*.pyc"]) is True
        assert FileUtils.should_ignore("test.pyc", ["*.pyc", "*.pyo"]) is True
        assert FileUtils.should_ignore("test.py", ["*.pyc"]) is False

    def test_should_ignore_directory(self) -> None:
        """Directory-style patterns match paths inside them."""
        assert FileUtils.should_ignore("src/__pycache__", ["__pycache__/*"]) is True
        assert FileUtils.should_ignore(".git/config", [".git/*"]) is True

    def test_is_text_file(self) -> None:
        """Known source and config filenames are treated as text."""
        for filename in ("test.py", "test.js", "test.tsx", "Dockerfile"):
            assert FileUtils.is_text_file(filename) is True

    def test_is_not_binary_file(self) -> None:
        """Known binary extensions are rejected."""
        for filename in ("test.png", "test.jpg", "test.so"):
            assert FileUtils.is_text_file(filename) is False

    def test_read_file_content(self) -> None:
        """Reading an existing file returns its exact contents."""
        import os
        import tempfile
        with tempfile.NamedTemporaryFile(mode='w', suffix='.py', delete=False) as handle:
            handle.write("print('hello')")
            temp_path = handle.name
        try:
            assert FileUtils.read_file_content(temp_path) == "print('hello')"
        finally:
            os.unlink(temp_path)

    def test_read_file_content_not_found(self) -> None:
        """A missing file yields None rather than an exception."""
        assert FileUtils.read_file_content("/nonexistent/file.py") is None

    def test_count_lines(self) -> None:
        """Line counting follows split-on-newline semantics."""
        assert FileUtils.count_lines("line1\nline2\nline3") == 3
        assert FileUtils.count_lines("") == 1
        assert FileUtils.count_lines("single") == 1

    def test_get_relative_path(self) -> None:
        """Paths are reported relative to the given root."""
        import os
        import tempfile
        with tempfile.TemporaryDirectory() as tmpdir:
            nested = os.path.join(tmpdir, "subdir")
            os.makedirs(nested)
            target = os.path.join(nested, "test.py")
            assert FileUtils.get_relative_path(target, tmpdir) == os.path.join("subdir", "test.py")

    def test_walk_directory(self) -> None:
        """Walking finds files at all depths."""
        import os
        import tempfile
        with tempfile.TemporaryDirectory() as tmpdir:
            os.makedirs(os.path.join(tmpdir, "src"))
            os.makedirs(os.path.join(tmpdir, "tests"))
            with open(os.path.join(tmpdir, "main.py"), "w") as handle:
                handle.write("print('hello')")
            with open(os.path.join(tmpdir, "src", "module.py"), "w") as handle:
                handle.write("def test(): pass")

            found = FileUtils.walk_directory(tmpdir, ["*.pyc", "__pycache__/*"], 100)
            assert len(found) == 2

    def test_walk_directory_with_ignore(self) -> None:
        """Ignored patterns are excluded from walk results."""
        import os
        import tempfile
        with tempfile.TemporaryDirectory() as tmpdir:
            os.makedirs(os.path.join(tmpdir, "__pycache__"))
            with open(os.path.join(tmpdir, "main.py"), "w") as handle:
                handle.write("print('hello')")
            with open(os.path.join(tmpdir, "__pycache__", "cache.pyc"), "w") as handle:
                handle.write("cached")

            found = FileUtils.walk_directory(tmpdir, ["*.pyc", "__pycache__/*"], 100)
            assert len(found) == 1
            assert "__pycache__" not in found[0]

    def test_get_directory_tree(self) -> None:
        """The rendered tree lists files present in the directory."""
        import os
        import tempfile
        with tempfile.TemporaryDirectory() as tmpdir:
            os.makedirs(os.path.join(tmpdir, "src"))
            os.makedirs(os.path.join(tmpdir, "tests"))
            with open(os.path.join(tmpdir, "main.py"), "w") as handle:
                handle.write("")
            with open(os.path.join(tmpdir, "src", "module.py"), "w") as handle:
                handle.write("")

            tree = FileUtils.get_directory_tree(tmpdir, ["*.pyc"], 3)
            assert len(tree) > 0
            assert any("main.py" in entry for entry in tree)
428
tests/unit/test_formatters.py
Normal file
428
tests/unit/test_formatters.py
Normal file
@@ -0,0 +1,428 @@
|
||||
"""Unit tests for output formatters."""
|
||||
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
from codesnap.core.analyzer import AnalysisResult, FileAnalysis
|
||||
from codesnap.core.parser import ClassInfo, FunctionInfo
|
||||
from codesnap.output.json_exporter import export_json, export_json_file
|
||||
from codesnap.output.llm_exporter import (
|
||||
estimate_tokens,
|
||||
export_llm_optimized,
|
||||
truncate_for_token_limit,
|
||||
)
|
||||
from codesnap.output.markdown_exporter import export_markdown, export_markdown_file
|
||||
|
||||
|
||||
class TestJsonExporter:
|
||||
"""Tests for JSON export functionality."""
|
||||
|
||||
def create_test_result(self):
|
||||
"""Create a test analysis result."""
|
||||
func = FunctionInfo(
|
||||
name="test_function",
|
||||
node_type="function_definition",
|
||||
start_line=1,
|
||||
end_line=10,
|
||||
parameters=[{"name": "x", "type": "int"}],
|
||||
return_type="str",
|
||||
is_async=False,
|
||||
)
|
||||
|
||||
cls = ClassInfo(
|
||||
name="TestClass",
|
||||
start_line=1,
|
||||
end_line=20,
|
||||
bases=["BaseClass"],
|
||||
methods=[func],
|
||||
)
|
||||
|
||||
file_analysis = FileAnalysis(
|
||||
path=Path("/test/project/main.py"),
|
||||
language="python",
|
||||
size=500,
|
||||
lines=50,
|
||||
functions=[func],
|
||||
classes=[cls],
|
||||
)
|
||||
|
||||
result = AnalysisResult()
|
||||
result.summary = {
|
||||
"total_files": 1,
|
||||
"total_functions": 1,
|
||||
"total_classes": 1,
|
||||
"total_dependencies": 0,
|
||||
"languages": {"python": 1},
|
||||
}
|
||||
result.files = [file_analysis]
|
||||
result.dependencies = []
|
||||
result.metrics = {}
|
||||
result.analysis_time = 0.1
|
||||
result.error_count = 0
|
||||
|
||||
return result
|
||||
|
||||
def test_export_json_structure(self):
|
||||
result = self.create_test_result()
|
||||
root = Path("/test/project")
|
||||
|
||||
json_output = export_json(result, root)
|
||||
|
||||
data = json.loads(json_output)
|
||||
|
||||
assert "metadata" in data
|
||||
assert "summary" in data
|
||||
assert "files" in data
|
||||
assert "dependencies" in data
|
||||
assert "metrics" in data
|
||||
|
||||
def test_export_json_metadata(self):
|
||||
result = self.create_test_result()
|
||||
root = Path("/test/project")
|
||||
|
||||
json_output = export_json(result, root)
|
||||
data = json.loads(json_output)
|
||||
|
||||
assert data["metadata"]["tool"] == "CodeSnap"
|
||||
assert data["metadata"]["version"] == "0.1.0"
|
||||
assert "timestamp" in data["metadata"]
|
||||
assert data["metadata"]["root_path"] == "/test/project"
|
||||
|
||||
def test_export_json_summary(self):
|
||||
result = self.create_test_result()
|
||||
root = Path("/test/project")
|
||||
|
||||
json_output = export_json(result, root)
|
||||
data = json.loads(json_output)
|
||||
|
||||
assert data["summary"]["total_files"] == 1
|
||||
assert data["summary"]["total_functions"] == 1
|
||||
assert data["summary"]["total_classes"] == 1
|
||||
|
||||
def test_export_json_functions(self):
|
||||
result = self.create_test_result()
|
||||
root = Path("/test/project")
|
||||
|
||||
json_output = export_json(result, root)
|
||||
data = json.loads(json_output)
|
||||
|
||||
assert len(data["files"]) == 1
|
||||
assert len(data["files"][0]["functions"]) == 1
|
||||
assert data["files"][0]["functions"][0]["name"] == "test_function"
|
||||
|
||||
def test_export_json_classes(self):
    """Class entries, including base classes, survive the round trip."""
    data = json.loads(export_json(self.create_test_result(), Path("/test/project")))
    classes = data["files"][0]["classes"]
    assert len(classes) == 1
    first = classes[0]
    assert first["name"] == "TestClass"
    assert first["bases"] == ["BaseClass"]
|
||||
|
||||
def test_export_json_file(self, tmp_path):
    """export_json_file writes a parseable JSON report to disk."""
    target = tmp_path / "output.json"
    export_json_file(self.create_test_result(), Path("/test/project"), target)
    assert target.exists()
    assert "metadata" in json.loads(target.read_text())
|
||||
|
||||
|
||||
class TestMarkdownExporter:
    """Tests for Markdown export functionality."""

    def create_test_result(self):
        """Create a test analysis result."""
        async_func = FunctionInfo(
            name="process_data",
            node_type="function_definition",
            start_line=5,
            end_line=15,
            parameters=[{"name": "data"}, {"name": "options"}],
            is_async=True,
        )
        analysis = FileAnalysis(
            path=Path("/test/project/utils.py"),
            language="python",
            size=300,
            lines=30,
            functions=[async_func],
            classes=[],
        )

        result = AnalysisResult()
        result.summary = {
            "total_files": 1,
            "total_functions": 1,
            "total_classes": 0,
            "total_dependencies": 0,
            "languages": {"python": 1},
        }
        result.files = [analysis]
        result.dependencies = []
        result.metrics = {}
        result.analysis_time = 0.05
        result.error_count = 0
        return result

    def test_export_markdown_header(self):
        """Report begins with the standard top-level heading."""
        md_output = export_markdown(self.create_test_result(), Path("/test/project"))
        assert "# CodeSnap Analysis Report" in md_output

    def test_export_markdown_summary(self):
        """Summary section lists the file totals."""
        md_output = export_markdown(self.create_test_result(), Path("/test/project"))
        assert "## Summary" in md_output
        assert "Total Files" in md_output
        assert "1" in md_output

    def test_export_markdown_language_breakdown(self):
        """Per-language counts appear under their own subsection."""
        md_output = export_markdown(self.create_test_result(), Path("/test/project"))
        assert "### Language Breakdown" in md_output
        assert "python" in md_output.lower()

    def test_export_markdown_file_structure(self):
        """File tree is rendered inside a fenced code block."""
        md_output = export_markdown(self.create_test_result(), Path("/test/project"))
        assert "## File Structure" in md_output
        assert "```" in md_output

    def test_export_markdown_functions(self):
        """Function names and async markers make it into the report."""
        md_output = export_markdown(self.create_test_result(), Path("/test/project"))
        assert "process_data" in md_output
        assert "async" in md_output.lower()

    def test_export_markdown_file(self, tmp_path):
        """export_markdown_file writes the report to disk."""
        output_file = tmp_path / "output.md"
        export_markdown_file(self.create_test_result(), Path("/test/project"), output_file)
        assert output_file.exists()
        assert "# CodeSnap Analysis Report" in output_file.read_text()

    def test_empty_result(self):
        """An empty analysis still produces a report with the main heading."""
        result = AnalysisResult()
        result.summary = {}
        result.files = []
        result.dependencies = []
        result.metrics = {}
        result.analysis_time = 0
        result.error_count = 0

        md_output = export_markdown(result, Path("/test"))
        assert "# CodeSnap Analysis Report" in md_output
|
||||
|
||||
|
||||
class TestLLMExporter:
    """Tests for LLM-optimized export functionality."""

    @staticmethod
    def _make_result(func_name):
        """Build a one-file AnalysisResult holding a single function.

        Shared by the export tests below; previously each test duplicated
        this ~35-line fixture inline.
        """
        func = FunctionInfo(
            name=func_name,
            node_type="function",
            start_line=1,
            end_line=5,
        )
        file_analysis = FileAnalysis(
            path=Path("/test/main.py"),
            language="python",
            size=100,
            lines=10,
            functions=[func],
            classes=[],
        )
        result = AnalysisResult()
        result.summary = {
            "total_files": 1,
            "total_functions": 1,
            "total_classes": 0,
            "total_dependencies": 0,
            "languages": {"python": 1},
        }
        result.files = [file_analysis]
        result.dependencies = []
        result.metrics = {}
        result.analysis_time = 0.01
        result.error_count = 0
        return result

    def test_estimate_tokens_python(self):
        """Code text yields a positive estimate smaller than its length."""
        text = "def hello():\n print('hello')"
        tokens = estimate_tokens(text, "python")
        assert tokens > 0
        assert tokens < len(text)

    def test_estimate_tokens_markdown(self):
        """Markdown text yields a positive estimate."""
        tokens = estimate_tokens("# Heading\n\nSome content here.", "markdown")
        assert tokens > 0

    def test_truncate_under_limit(self):
        """Text already under the limit is returned unchanged."""
        text = "Short text"
        assert truncate_for_token_limit(text, 100, "markdown") == text

    def test_truncate_over_limit(self):
        """Oversized text is shortened and carries a truncation marker."""
        text = "A" * 1000
        truncated = truncate_for_token_limit(text, 100, "markdown")
        assert len(truncated) < len(text)
        assert "[Output truncated due to token limit]" in truncated

    def test_export_llm_optimized_structure(self):
        """Output contains the summary, structure, and components sections."""
        output = export_llm_optimized(self._make_result("helper"), Path("/test"))
        assert "## CODEBASE ANALYSIS SUMMARY" in output
        assert "### STRUCTURE" in output
        assert "### KEY COMPONENTS" in output

    def test_export_llm_with_max_tokens(self):
        """max_tokens either bounds the estimate or triggers truncation."""
        output = export_llm_optimized(self._make_result("test"), Path("/test"), max_tokens=100)
        tokens = estimate_tokens(output, "markdown")
        assert tokens <= 100 or "[Output truncated" in output
|
||||
|
||||
|
||||
class TestFormatterIntegration:
    """Integration tests for formatters."""

    @staticmethod
    def _make_result():
        """Build the minimal one-file AnalysisResult shared by every test.

        Previously each of the three tests duplicated this fixture inline.
        """
        func = FunctionInfo(name="test", node_type="func", start_line=1, end_line=10)
        file_analysis = FileAnalysis(
            path=Path("/test/main.py"),
            language="python",
            size=100,
            lines=10,
            functions=[func],
        )
        result = AnalysisResult()
        result.summary = {"total_files": 1}
        result.files = [file_analysis]
        result.dependencies = []
        result.metrics = {}
        result.analysis_time = 0
        return result

    def test_json_is_valid_json(self):
        """JSON export parses cleanly."""
        json_output = export_json(self._make_result(), Path("/test"))
        data = json.loads(json_output)
        assert data is not None

    def test_markdown_is_readable(self):
        """Markdown export is non-empty and contains headings."""
        md_output = export_markdown(self._make_result(), Path("/test"))
        assert md_output is not None
        assert len(md_output) > 0
        assert "#" in md_output

    def test_llm_output_has_summary_first(self):
        """The summary section precedes the structure section."""
        output = export_llm_optimized(self._make_result(), Path("/test"))
        summary_pos = output.find("CODEBASE ANALYSIS SUMMARY")
        structure_pos = output.find("STRUCTURE")
        assert summary_pos < structure_pos
|
||||
77
tests/unit/test_json_formatter.py
Normal file
77
tests/unit/test_json_formatter.py
Normal file
@@ -0,0 +1,77 @@
|
||||
from codesnap.output.json_formatter import JSONFormatter
|
||||
|
||||
|
||||
class TestJSONFormatter:
    """Tests for the JSONFormatter output backend."""

    def setup_method(self) -> None:
        self.formatter = JSONFormatter()

    @staticmethod
    def _empty_graph() -> dict:
        """Dependency-graph stub with no edges, cycles, or orphans.

        Previously duplicated verbatim in every test.
        """
        return {
            "total_dependencies": 0,
            "orphaned_files": 0,
            "cycles_detected": 0,
            "cycle_details": [],
            "orphaned_details": [],
            "edges": [],
            "statistics": {},
        }

    def _parse(self, result: dict) -> dict:
        """Format *result* and decode the JSON output back into a dict."""
        import json  # local import mirrors the original tests

        return json.loads(self.formatter.format(result))

    def test_format_valid_result(self) -> None:
        """A populated result yields schema, summary, and files sections."""
        result = {
            "files": [
                {
                    "file": "test.py",
                    "language": "python",
                    "lines": 50,
                    "functions": [{"name": "test_func", "start_line": 1, "end_line": 10}],
                    "classes": [],
                    "complexity": {"score": 5, "rating": "low"},
                }
            ],
            "dependency_graph": self._empty_graph(),
        }
        parsed = self._parse(result)
        assert "schema_version" in parsed
        assert "summary" in parsed
        assert "files" in parsed
        assert parsed["summary"]["total_files"] == 1

    def test_format_empty_result(self) -> None:
        """No input files produce a zero total_files summary."""
        parsed = self._parse({"files": [], "dependency_graph": self._empty_graph()})
        assert parsed["summary"]["total_files"] == 0

    def test_summary_includes_language_counts(self) -> None:
        """Summary counts files per language."""
        result = {
            "files": [
                {"file": "a.py", "language": "python", "lines": 10, "functions": [], "classes": [], "complexity": {}},
                {"file": "b.js", "language": "javascript", "lines": 20, "functions": [], "classes": [], "complexity": {}},
                {"file": "c.py", "language": "python", "lines": 30, "functions": [], "classes": [], "complexity": {}},
            ],
            "dependency_graph": self._empty_graph(),
        }
        parsed = self._parse(result)
        assert parsed["summary"]["languages"]["python"] == 2
        assert parsed["summary"]["languages"]["javascript"] == 1
|
||||
168
tests/unit/test_language_detector.py
Normal file
168
tests/unit/test_language_detector.py
Normal file
@@ -0,0 +1,168 @@
|
||||
"""Unit tests for language detection module."""
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from codesnap.core.language_detector import (
|
||||
EXTENSION_TO_LANGUAGE,
|
||||
detect_language,
|
||||
detect_language_by_extension,
|
||||
detect_language_by_shebang,
|
||||
get_language_info,
|
||||
get_supported_extensions,
|
||||
get_supported_languages,
|
||||
)
|
||||
|
||||
|
||||
class TestDetectLanguageByExtension:
    """Tests for extension-based language detection."""

    @staticmethod
    def _detect(filename):
        """Shorthand: run extension detection on a bare filename."""
        return detect_language_by_extension(Path(filename))

    def test_python_extension_py(self):
        assert self._detect("test.py") == "python"

    def test_python_extension_pyi(self):
        assert self._detect("test.pyi") == "python"

    def test_javascript_extension_js(self):
        assert self._detect("test.js") == "javascript"

    def test_typescript_extension_ts(self):
        assert self._detect("test.ts") == "typescript"

    def test_go_extension(self):
        assert self._detect("main.go") == "go"

    def test_rust_extension(self):
        assert self._detect("main.rs") == "rust"

    def test_java_extension(self):
        assert self._detect("Main.java") == "java"

    def test_cpp_extension(self):
        for filename in ("test.cpp", "test.hpp"):
            assert self._detect(filename) == "cpp"

    def test_ruby_extension(self):
        assert self._detect("script.rb") == "ruby"

    def test_php_extension(self):
        assert self._detect("script.php") == "php"

    def test_unknown_extension(self):
        assert self._detect("test.xyz") is None

    def test_case_insensitive(self):
        for filename, expected in (("test.PY", "python"), ("test.JS", "javascript")):
            assert self._detect(filename) == expected
|
||||
|
||||
|
||||
class TestDetectLanguageByShebang:
    """Tests for shebang-based language detection."""

    def test_python_shebang(self):
        assert detect_language_by_shebang("#!/usr/bin/env python3\nprint('hello')") == "python"

    def test_python_shebang_alt(self):
        assert detect_language_by_shebang("#!/usr/bin/python\nprint('hello')") == "python"

    def test_node_shebang(self):
        assert detect_language_by_shebang("#!/usr/bin/env node\nconsole.log('hello')") == "javascript"

    def test_ruby_shebang(self):
        assert detect_language_by_shebang("#!/usr/bin/env ruby\nputs 'hello'") == "ruby"

    def test_php_shebang(self):
        assert detect_language_by_shebang("#!/usr/bin/env php\necho 'hello';") == "php"

    def test_no_shebang(self):
        # Plain source with no shebang line should not be classified.
        assert detect_language_by_shebang("print('hello')") is None

    def test_empty_content(self):
        assert detect_language_by_shebang("") is None
|
||||
|
||||
|
||||
class TestDetectLanguage:
    """Tests for combined language detection."""

    def test_detection_by_extension(self):
        for filename, expected in (("test.py", "python"), ("test.js", "javascript")):
            assert detect_language(Path(filename)) == expected

    def test_detection_fallback_to_shebang(self):
        # Extensionless file: detection must fall back to the shebang line.
        script = Path("script")
        for shebang, expected in (
            ("#!/usr/bin/env python", "python"),
            ("#!/usr/bin/env node", "javascript"),
        ):
            assert detect_language(script, shebang) == expected

    def test_unknown_file_no_content(self):
        assert detect_language(Path("unknown.xyz")) is None
|
||||
|
||||
|
||||
class TestGetLanguageInfo:
    """Tests for language info retrieval."""

    def test_get_python_info(self):
        python_info = get_language_info("python")
        assert python_info is not None
        assert python_info.name == "python"
        assert ".py" in python_info.extensions

    def test_get_unknown_language(self):
        assert get_language_info("unknown") is None
|
||||
|
||||
|
||||
class TestGetSupportedExtensions:
    """Tests for supported extensions."""

    def test_returns_set(self):
        assert isinstance(get_supported_extensions(), set)

    def test_includes_common_extensions(self):
        supported = get_supported_extensions()
        for ext in (".py", ".js", ".ts", ".go"):
            assert ext in supported
|
||||
|
||||
|
||||
class TestGetSupportedLanguages:
    """Tests for supported programming languages."""

    def test_returns_list(self):
        assert isinstance(get_supported_languages(), list)

    def test_includes_main_languages(self):
        supported = get_supported_languages()
        for lang in ("python", "javascript", "typescript", "go", "rust", "java"):
            assert lang in supported

    def test_excludes_config_formats(self):
        # Config/markup formats are detectable but not "programming languages".
        supported = get_supported_languages()
        for fmt in ("json", "yaml", "markdown"):
            assert fmt not in supported
|
||||
|
||||
|
||||
class TestExtensionToLanguage:
    """Tests for extension to language mapping."""

    # Every language identifier the mapping is allowed to target.
    # frozenset gives O(1) membership instead of the original O(n) list scan.
    _KNOWN_LANGUAGES = frozenset([
        "python", "javascript", "typescript", "go", "rust",
        "java", "c", "cpp", "ruby", "php", "shell",
        "json", "yaml", "markdown",
    ])

    def test_mapping_completeness(self):
        """Every mapped value is one of the known language identifiers."""
        # Iterate values directly: the original looped over .items() and
        # discarded the key (ruff PERF102).
        for lang in EXTENSION_TO_LANGUAGE.values():
            assert lang in self._KNOWN_LANGUAGES

    def test_no_duplicate_extensions(self):
        """Dict keys are unique by construction; guards future structure changes."""
        extensions = list(EXTENSION_TO_LANGUAGE.keys())
        assert len(extensions) == len(set(extensions))
|
||||
112
tests/unit/test_llm_formatter.py
Normal file
112
tests/unit/test_llm_formatter.py
Normal file
@@ -0,0 +1,112 @@
|
||||
from codesnap.output.llm_formatter import LLMFormatter
|
||||
|
||||
|
||||
class TestLLMFormatter:
    """Tests for the LLM-optimized output backend."""

    def setup_method(self) -> None:
        self.formatter = LLMFormatter(max_tokens=1000)

    @staticmethod
    def _graph(**overrides) -> dict:
        """Dependency-graph stub; keyword overrides replace the zero defaults.

        Previously every test duplicated this dict verbatim.
        """
        graph = {
            "total_dependencies": 0,
            "orphaned_files": 0,
            "cycles_detected": 0,
            "cycle_details": [],
            "orphaned_details": [],
            "edges": [],
            "statistics": {},
        }
        graph.update(overrides)
        return graph

    def test_format_valid_result(self) -> None:
        """A populated result yields every major report section."""
        result = {
            "files": [
                {
                    "file": "test.py",
                    "language": "python",
                    "lines": 50,
                    "functions": [{"name": "test_func", "start_line": 1, "end_line": 10, "parameters": [], "return_type": "str"}],
                    "classes": [],
                    "complexity": {"score": 5, "rating": "low"},
                }
            ],
            "dependency_graph": self._graph(),
        }
        output = self.formatter.format(result)
        assert "## Codebase Summary" in output
        assert "### Key Files" in output
        assert "### Classes and Functions" in output
        assert "### Dependencies" in output

    def test_respects_token_limit(self) -> None:
        """Formatted output stays near the configured token budget."""
        output = self.formatter.format({"files": [], "dependency_graph": self._graph()})
        max_chars = 1000 * 4  # rough 4-chars-per-token heuristic
        assert len(output) <= max_chars + 100

    def test_includes_high_level_summary(self) -> None:
        """Summary mentions the languages and overall file count."""
        result = {
            "files": [
                {"file": "a.py", "language": "python", "lines": 50, "functions": [], "classes": [], "complexity": {}},
                {"file": "b.py", "language": "python", "lines": 30, "functions": [], "classes": [], "complexity": {}},
                {"file": "c.js", "language": "javascript", "lines": 20, "functions": [], "classes": [], "complexity": {}},
            ],
            "dependency_graph": self._graph(),
        }
        output = self.formatter.format(result)
        assert "python" in output.lower()
        assert "3 files" in output or "files" in output

    def test_compresses_detailed_file_list(self) -> None:
        """Many files collapse into the compressed detail section."""
        result = {
            "files": [
                {"file": f"file{i}.py", "language": "python", "lines": 10,
                 "functions": [{"name": f"func{i}a"}, {"name": f"func{i}b"}, {"name": f"func{i}c"}],
                 "classes": [], "complexity": {}}
                for i in range(10)
            ],
            "dependency_graph": self._graph(),
        }
        output = self.formatter.format(result)
        assert "Detailed File List (compressed)" in output

    def test_warns_about_cycles(self) -> None:
        """Circular imports are surfaced in the report."""
        result = {
            "files": [
                {"file": "a.py", "language": "python", "lines": 10, "functions": [], "classes": [], "complexity": {}},
                {"file": "b.py", "language": "python", "lines": 10, "functions": [], "classes": [], "complexity": {}},
            ],
            "dependency_graph": self._graph(
                total_dependencies=2,
                cycles_detected=1,
                cycle_details=[["a.py", "b.py", "a.py"]],
            ),
        }
        output = self.formatter.format(result)
        assert "circular" in output.lower() or "cycle" in output.lower()
|
||||
117
tests/unit/test_markdown_formatter.py
Normal file
117
tests/unit/test_markdown_formatter.py
Normal file
@@ -0,0 +1,117 @@
|
||||
from codesnap.output.markdown_formatter import MarkdownFormatter
|
||||
|
||||
|
||||
class TestMarkdownFormatter:
    """Tests for the Markdown output backend."""

    def setup_method(self) -> None:
        self.formatter = MarkdownFormatter()

    @staticmethod
    def _graph(**overrides) -> dict:
        """Dependency-graph stub; keyword overrides replace the zero defaults.

        Previously every test duplicated this dict verbatim.
        """
        graph = {
            "total_dependencies": 0,
            "orphaned_files": 0,
            "cycles_detected": 0,
            "cycle_details": [],
            "orphaned_details": [],
            "edges": [],
            "statistics": {},
        }
        graph.update(overrides)
        return graph

    def test_format_valid_result(self) -> None:
        """A populated result renders every top-level report section."""
        result = {
            "files": [
                {
                    "file": "test.py",
                    "language": "python",
                    "lines": 50,
                    "functions": [{"name": "test_func", "start_line": 1, "end_line": 10, "parameters": [], "return_type": "str"}],
                    "classes": [],
                    "complexity": {"score": 5, "rating": "low"},
                }
            ],
            "dependency_graph": self._graph(),
        }
        output = self.formatter.format(result)
        assert "# CodeSnap Analysis Report" in output
        assert "## Overview" in output
        assert "## File Structure" in output
        assert "## Key Functions" in output
        assert "## Dependencies" in output
        assert "## Complexity Metrics" in output

    def test_format_empty_result(self) -> None:
        """An empty result still renders the overview counters."""
        output = self.formatter.format({"files": [], "dependency_graph": self._graph()})
        assert "Total Files" in output

    def test_includes_language_breakdown(self) -> None:
        """Per-language counts are listed."""
        result = {
            "files": [
                {"file": "a.py", "language": "python", "lines": 10, "functions": [], "classes": [], "complexity": {}},
                {"file": "b.js", "language": "javascript", "lines": 20, "functions": [], "classes": [], "complexity": {}},
            ],
            "dependency_graph": self._graph(),
        }
        output = self.formatter.format(result)
        assert "python: 1" in output or "python: 2" in output
        assert "javascript:" in output

    def test_shows_circular_dependencies(self) -> None:
        """Cycles in the dependency graph are called out."""
        result = {
            "files": [
                {"file": "a.py", "language": "python", "lines": 10, "functions": [], "classes": [], "complexity": {}},
                {"file": "b.py", "language": "python", "lines": 10, "functions": [], "classes": [], "complexity": {}},
            ],
            "dependency_graph": self._graph(
                total_dependencies=2,
                cycles_detected=1,
                cycle_details=[["a.py", "b.py", "a.py"]],
                edges=[{"from": "a.py", "to": "b.py"}, {"from": "b.py", "to": "a.py"}],
            ),
        }
        output = self.formatter.format(result)
        assert "Circular Dependencies Detected" in output

    def test_shows_high_complexity_files(self) -> None:
        """Files rated high complexity get their own section."""
        result = {
            "files": [
                {
                    "file": "complex.py",
                    "language": "python",
                    "lines": 100,
                    "functions": [],
                    "classes": [],
                    "complexity": {"score": 55, "rating": "high"},
                }
            ],
            "dependency_graph": self._graph(),
        }
        output = self.formatter.format(result)
        assert "High Complexity Files" in output
        assert "complex.py" in output
|
||||
Reference in New Issue
Block a user