fix: resolve CI/CD issues - Poetry setup, type annotations, MyPy errors
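The diff below swaps the deprecated `typing` aliases (`List`, `Dict`, `Set`) for the built-in generics allowed since PEP 585, drops redundant docstrings, adds missing return annotations, and renames an unused loop variable to satisfy the linter. A minimal sketch of the annotation pattern, assuming Python 3.9+; `Node` and `fan_out` are illustrative names, not code from this repository:

from dataclasses import dataclass, field


@dataclass
class Node:
    name: str
    # Was: Set[str] and Dict[str, List[str]] from typing; the built-in types
    # themselves are subscriptable on Python 3.9+ (PEP 585).
    dependencies: set[str] = field(default_factory=set)
    edges: dict[str, list[str]] = field(default_factory=dict)


def fan_out(edges: dict[str, list[str]], name: str) -> list[str]:
    # Explicit return annotation so mypy can check callers.
    return edges.get(name, [])

With these spellings no `typing` import is needed for the collections, which is why only `Optional` remains imported in the first hunk.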
@@ -1,4 +1,4 @@
-from typing import List, Dict, Set, Optional
+from typing import Optional
 from dataclasses import dataclass, field
 from pathlib import Path
 from codechunk.core.chunking import ParsedChunk
@@ -9,19 +9,18 @@ class DependencyNode:
     chunk_name: str
     file_path: Path
     module_name: str
-    dependencies: Set[str] = field(default_factory=set)
-    dependents: Set[str] = field(default_factory=set)
+    dependencies: set[str] = field(default_factory=set)
+    dependents: set[str] = field(default_factory=set)
     is_circular: bool = False


 class DependencyAnalyzer:
     def __init__(self):
-        self.dependency_graph: Dict[str, DependencyNode] = {}
-        self.module_to_chunks: Dict[str, List[str]] = {}
+        self.dependency_graph: dict[str, DependencyNode] = {}
+        self.module_to_chunks: dict[str, list[str]] = {}

-    def analyze_dependencies(self, chunks: List[ParsedChunk],
-                             project_files: List[Path]) -> Dict[str, DependencyNode]:
-        """Analyze dependencies between chunks."""
+    def analyze_dependencies(self, chunks: list[ParsedChunk],
+                             project_files: list[Path]) -> dict[str, DependencyNode]:
         self.dependency_graph = {}
         self.module_to_chunks = {}

@@ -53,16 +52,14 @@ class DependencyAnalyzer:

         return self.dependency_graph

-    def _build_module_cache(self, project_files: List[Path]) -> Dict[Path, str]:
-        """Build cache of file to module name mappings."""
-        cache = {}
+    def _build_module_cache(self, project_files: list[Path]) -> dict[Path, str]:
+        cache: dict[Path, str] = {}
         for file_path in project_files:
             module_name = self._get_module_name(file_path, set(project_files))
             cache[file_path] = module_name
         return cache

-    def _get_module_name(self, file_path: Path, project_root: Set[Path]) -> str:
-        """Get module name from file path."""
+    def _get_module_name(self, file_path: Path, project_root: set[Path]) -> str:
         try:
             if project_root:
                 root = min(project_root, key=lambda p: len(p.parts))
@@ -84,8 +81,7 @@ class DependencyAnalyzer:
             return file_path.stem

     def _resolve_import(self, import_str: str, current_file: Path,
-                        project_root: Set[Path], module_cache: Dict[Path, str]) -> Optional[str]:
-        """Resolve import string to module name."""
+                        project_root: set[Path], module_cache: dict[Path, str]) -> Optional[str]:
         clean_import = import_str.strip()

         parts = clean_import.split('.')
@@ -102,7 +98,7 @@ class DependencyAnalyzer:
                            'torch', 'tensorflow', 'matplotlib', 'scipy', 'sklearn']:
             return None

-        for file_path, module_name in module_cache.items():
+        for _file_path, module_name in module_cache.items():
             if module_name.endswith(base_module) or module_name == base_module:
                 return module_name

@@ -113,19 +109,17 @@ class DependencyAnalyzer:

         return clean_import

-    def _build_dependency_links(self):
-        """Build reverse dependency links (dependents)."""
+    def _build_dependency_links(self) -> None:
         for node in self.dependency_graph.values():
             for dep in node.dependencies:
                 if dep in self.dependency_graph:
                     self.dependency_graph[dep].dependents.add(node.chunk_name)

-    def _detect_circular_dependencies(self):
-        """Detect circular dependencies in the graph."""
+    def _detect_circular_dependencies(self) -> None:
         visited = set()
         rec_stack = set()

-        def detect_cycle(node_name: str, path: List[str]) -> bool:
+        def detect_cycle(node_name: str, path: list[str]) -> bool:
             visited.add(node_name)
             rec_stack.add(node_name)
             path.append(node_name)
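For context, `_detect_circular_dependencies` (only its signature and the nested helper's annotation change in this commit) walks the graph depth-first, keeping a `visited` set and a recursion stack. A self-contained sketch of that approach over a plain adjacency dict, rather than the project's DependencyNode graph; names are illustrative:

def find_cycles(graph: dict[str, set[str]]) -> list[list[str]]:
    """Depth-first search with a recursion stack; records one path per back edge."""
    visited: set[str] = set()
    rec_stack: set[str] = set()
    cycles: list[list[str]] = []

    def detect_cycle(node: str, path: list[str]) -> None:
        visited.add(node)
        rec_stack.add(node)
        path.append(node)
        for dep in graph.get(node, set()):
            if dep not in visited:
                detect_cycle(dep, list(path))
            elif dep in rec_stack:
                # A dependency still on the recursion stack closes a cycle.
                cycles.append(path + [dep])
        rec_stack.discard(node)

    for node in graph:
        if node not in visited:
            detect_cycle(node, [])
    return cycles


print(find_cycles({"a": {"b"}, "b": {"c"}, "c": {"a"}}))  # [['a', 'b', 'c', 'a']]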
@@ -150,8 +144,7 @@ class DependencyAnalyzer:
             if node_name not in visited:
                 detect_cycle(node_name, [])

-    def get_essential_chunks(self, selected_chunks: List[str]) -> List[str]:
-        """Get all chunks needed including transitive dependencies."""
+    def get_essential_chunks(self, selected_chunks: list[str]) -> list[str]:
         essential = set(selected_chunks)
         to_process = list(selected_chunks)

@@ -166,8 +159,7 @@ class DependencyAnalyzer:

         return list(essential)

-    def get_impacted_chunks(self, modified_chunks: List[str]) -> List[str]:
-        """Get all chunks that depend on the modified chunks."""
+    def get_impacted_chunks(self, modified_chunks: list[str]) -> list[str]:
         impacted = set(modified_chunks)
         to_process = list(modified_chunks)

@@ -182,8 +174,7 @@ class DependencyAnalyzer:

         return list(impacted)

-    def get_dependency_stats(self) -> Dict[str, int]:
-        """Get statistics about dependencies."""
+    def get_dependency_stats(self) -> dict[str, int]:
         stats = {
             "total_nodes": len(self.dependency_graph),
             "nodes_with_deps": 0,
@@ -199,16 +190,12 @@ class DependencyAnalyzer:
             depent_count = len(node.dependents)
-
             stats["total_edges"] += dep_count
-
             if dep_count > 0:
                 stats["nodes_with_deps"] += 1
-
             if depent_count > 0:
                 stats["nodes_with_dependents"] += 1
-
             if node.is_circular:
                 stats["circular_deps"] += 1
-
             stats["max_dependencies"] = max(stats["max_dependencies"], dep_count)
             stats["max_dependents"] = max(stats["max_dependents"], depent_count)
+

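For orientation, a hedged usage sketch of the analyzer as it stands after this commit. The `DependencyAnalyzer` module path and the chunk names are assumptions for illustration (only `codechunk.core.chunking.ParsedChunk` appears in the diff), and constructing ParsedChunk objects is out of scope here:

from pathlib import Path

from codechunk.core.chunking import ParsedChunk  # import path taken from the diff
from codechunk.core.dependencies import DependencyAnalyzer  # module path assumed, not shown in the diff

project_files = list(Path("src").rglob("*.py"))
chunks: list[ParsedChunk] = []  # in practice, produced by the project's chunker

analyzer = DependencyAnalyzer()
graph = analyzer.analyze_dependencies(chunks, project_files)

print(analyzer.get_dependency_stats())                   # node/edge/circular-dependency counts
print(analyzer.get_essential_chunks(["example.chunk"]))  # selection plus transitive dependencies
print(analyzer.get_impacted_chunks(["example.chunk"]))   # everything that depends on the change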