Compare commits
101 Commits
| SHA1 |
|---|
| 83e987275e |
| b2bca8ea7d |
| 36939144b6 |
| 591e26d27c |
| 96831fcb44 |
| 6726e8e6ae |
| 860ebb4eaf |
| ef0e97042e |
| 6d447dddb4 |
| 7ea3057bf6 |
| 63515eec3f |
| 9d88db7099 |
| d5cdbe815b |
| 3c46c50a61 |
| a2a4918873 |
| 3b318eae0a |
| 711103c5c8 |
| 45f44140ab |
| 73dc1ea7fb |
| a67c03d3d4 |
| ee2b30fd0d |
| a92df62a82 |
| 3d6adc4699 |
| c60b4c14be |
| 1e90a21b7d |
| 8b7e5fcf72 |
| 2eccaae2af |
| ae2530577a |
| 1833739df5 |
| 6c2c810495 |
| f052a36689 |
| 57c4dc2759 |
| 50de5654bf |
| cac4f1f8dd |
| cf5b0a02b0 |
| 0caa7c9585 |
| 9380411871 |
| 72bec60e37 |
| d9812fc17c |
| c41232b3f6 |
| c0758d1f2b |
| 7bbe910333 |
| 09792d2bba |
| d2c658d2a3 |
| 01af6ad53f |
| 4af2c953f7 |
| f2ca3181ee |
| 4f6f5e0370 |
| 05d7c2ec3f |
| 2579e5d030 |
| b886ea10e0 |
| 1ee4225434 |
| 048517a04d |
| 82dee07c85 |
| 368b86c08c |
| 9ad6acfe83 |
| 3931e15933 |
| b12acb76a2 |
| 377ed5c515 |
| 6151051249 |
| fd3c1e799c |
| 4dd48104d0 |
| 3c95d7056e |
| 5c168f8ca2 |
| 8555078b55 |
| 55304fd609 |
| fa87555741 |
| ea55f03230 |
| 572eedf7a0 |
| a50490b27b |
| cd555972f7 |
| 7d2a543e6e |
| debe653cfb |
| d304a028dc |
| dc3e5e0709 |
| fdd46a3a93 |
| bc40082883 |
| 4a62d7ecb9 |
| 043ef10ec2 |
| 874d632726 |
| 91237edcb0 |
| 65b1680e83 |
| 2746b88aa5 |
| a9c5110fa7 |
| 3e220bb139 |
| 5f4748b12e |
| 3ee6bb106c |
| 71675842ed |
| 54aedeb5b2 |
| 6c1104d60a |
| 8a68e0b691 |
| 62021dd68b |
| 460263345c |
| fb4ce2c22c |
| 1d6d354f80 |
| 3712786db6 |
| 492b42ec70 |
| cdde9f629d |
| 15b3e04647 |
| 72366c19db |
| 5b64148722 |
**.gitignore** (vendored, 2 changes)
```diff
@@ -1,4 +1,4 @@
-__pycache__/
+# __pycache__/
 *.py[cod]
 *$py.class
 *.so
```
**README.md** (17 changes)
````diff
@@ -190,11 +190,11 @@ Top Errors:
 ```bash
 $ loglens report server.log --severity --output table
 Severity Breakdown:
-CRITICAL: ████████░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░ 2
-ERROR: ██████████████████████████████ 15
-WARNING: ██████████████████████████░░░░ 10
-INFO: ██████████████████████████████ 18
-DEBUG: ██████████████░░░░░░░░░░░░░░░░░ 6
+CRITICAL: ████████░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░ 2
+ERROR: ██████████████████████████████████████████████████ 15
+WARNING: ██████████████████████████░░░░░░░░░░░░░░░░░░░░░░░░ 10
+INFO: ██████████████████████████████████████████████████ 18
+DEBUG: ██████████████░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░ 6
 ```
 
 #### Real-time monitoring
@@ -237,9 +237,6 @@ severity_rules:
   error:
     - "ERROR"
     - "Exception"
   warning:
     - "WARNING"
     - "Deprecated"
 ```
 
 ### Custom Patterns
@@ -310,10 +307,10 @@ pytest tests/integration/ -v
 
 ```bash
 # Ruff linting
-ruff check loglens/
+ruff check loglens/ tests/
 
 # Type checking
-mypy loglens/ --ignore-missing-imports
+mypy loglens/
 ```
 
 ### Code Formatting
````
**loglens/__main__.py**

```diff
@@ -1,7 +1,8 @@
 """Entry point for LogLens CLI."""
 
 import sys
-from loglens.cli.main import main
+
+from loglens.cli.main import main_cli
 
 if __name__ == "__main__":
-    sys.exit(main())
+    sys.exit(main_cli())
```
**loglens/analyzers/__init__.py**

```diff
@@ -1,8 +1,8 @@
 """Log analyzers package."""
 
-from loglens.analyzers.patterns import PatternLibrary, ErrorPattern
-from loglens.analyzers.severity import SeverityClassifier, SeverityLevel
 from loglens.analyzers.analyzer import LogAnalyzer
+from loglens.analyzers.patterns import ErrorPattern, PatternLibrary
+from loglens.analyzers.severity import SeverityClassifier, SeverityLevel
 
 __all__ = [
     "PatternLibrary",
```
**loglens/analyzers/analyzer.py**

```diff
@@ -1,20 +1,21 @@
 """Log analyzer orchestrator."""
 
-from collections import Counter, defaultdict
+from collections import Counter
 from dataclasses import dataclass, field
 from datetime import datetime
-from typing import Any, Dict, List, Optional
+from typing import Any, Optional
 
-from loglens.analyzers.patterns import PatternLibrary
-from loglens.analyzers.severity import SeverityClassifier
+from loglens.analyzers.patterns import ErrorPattern, PatternLibrary
+from loglens.analyzers.severity import SeverityClassifier, SeverityLevel
 from loglens.parsers.base import LogFormat, ParsedLogEntry
 from loglens.parsers.factory import ParserFactory
 
 
 @dataclass
 class AnalysisResult:
     """Result of log analysis."""
-    entries: List[ParsedLogEntry] = field(default_factory=list)
+
+    entries: list[ParsedLogEntry] = field(default_factory=list)
     format_detected: LogFormat = LogFormat.UNKNOWN
     total_lines: int = 0
     parsed_count: int = 0
@@ -22,19 +23,19 @@ class AnalysisResult:
     warning_count: int = 0
     critical_count: int = 0
     debug_count: int = 0
-    pattern_matches: Dict[str, int] = field(default_factory=dict)
-    severity_breakdown: Dict[str, int] = field(default_factory=dict)
-    top_errors: List[Dict[str, Any]] = field(default_factory=list)
-    host_breakdown: Dict[str, int] = field(default_factory=dict)
+    pattern_matches: dict[str, int] = field(default_factory=dict)
+    severity_breakdown: dict[str, int] = field(default_factory=dict)
+    top_errors: list[dict[str, Any]] = field(default_factory=list)
+    host_breakdown: dict[str, int] = field(default_factory=dict)
     time_range: Optional[tuple] = None
     analysis_time: datetime = field(default_factory=datetime.now)
-    suggestions: List[str] = field(default_factory=list)
+    suggestions: list[str] = field(default_factory=list)
 
 
 class LogAnalyzer:
     """Orchestrates log parsing and analysis."""
 
-    def __init__(self, config: Optional[Dict[str, Any]] = None):
+    def __init__(self, config: Optional[dict[str, Any]] = None):
         self.parser_factory = ParserFactory()
         self.pattern_library = PatternLibrary()
         self.severity_classifier = SeverityClassifier(
@@ -42,12 +43,9 @@ class LogAnalyzer:
         )
         self.config = config or {}
 
-    def analyze(self, lines: List[str], format: Optional[LogFormat] = None) -> AnalysisResult:
+    def analyze(self, lines: list[str], format: Optional[LogFormat] = None) -> AnalysisResult:
         """Analyze a list of log lines."""
-        result = AnalysisResult(
-            total_lines=len(lines),
-            analysis_time=datetime.now()
-        )
+        result = AnalysisResult(total_lines=len(lines), analysis_time=datetime.now())
 
         if not lines:
             return result
@@ -79,9 +77,7 @@
                 entry.error_pattern = pattern.name
 
             severity = self.severity_classifier.classify(
-                level=entry.level,
-                message=message,
-                pattern_match=entry.error_pattern
+                level=entry.level, message=message, pattern_match=entry.error_pattern
             )
             entry.severity = severity.value
 
@@ -118,13 +114,12 @@ class LogAnalyzer:
             result.time_range = (min(timestamps), max(timestamps))
 
         result.top_errors = [
-            {"pattern": name, "count": count}
-            for name, count in pattern_counts.most_common(10)
+            {"pattern": name, "count": count} for name, count in pattern_counts.most_common(10)
         ]
 
         result.suggestions = self._generate_suggestions(result)
 
-    def _generate_suggestions(self, result: AnalysisResult) -> List[str]:
+    def _generate_suggestions(self, result: AnalysisResult) -> list[str]:
         """Generate suggestions based on analysis."""
         suggestions = []
 
@@ -160,7 +155,7 @@
 
     def analyze_file(self, file_path: str, format: Optional[LogFormat] = None) -> AnalysisResult:
         """Analyze a log file."""
-        with open(file_path, 'r', encoding='utf-8', errors='replace') as f:
+        with open(file_path, encoding="utf-8", errors="replace") as f:
            lines = f.readlines()
 
         return self.analyze(lines, format)
@@ -168,10 +163,11 @@
     def analyze_stdin(self) -> AnalysisResult:
         """Analyze from stdin."""
         import sys
+
         lines = sys.stdin.readlines()
         return self.analyze(lines)
 
-    def get_pattern_info(self, pattern_name: str) -> Optional[Dict[str, Any]]:
+    def get_pattern_info(self, pattern_name: str) -> Optional[dict[str, Any]]:
         """Get information about a pattern."""
         for pattern in self.pattern_library.list_patterns():
             if pattern.name == pattern_name:
@@ -182,20 +178,16 @@
                     "description": pattern.description,
                     "suggestion": pattern.suggestion,
                     "group": pattern.group,
-                    "enabled": pattern.enabled
+                    "enabled": pattern.enabled,
                 }
         return None
 
-    def list_patterns_by_group(self) -> Dict[str, List[Dict[str, Any]]]:
+    def list_patterns_by_group(self) -> dict[str, list[dict[str, Any]]]:
         """List all patterns organized by group."""
         result = {}
         for group_name, patterns in self.pattern_library.list_groups().items():
             result[group_name] = [
-                {
-                    "name": p.name,
-                    "severity": p.severity,
-                    "description": p.description
-                }
+                {"name": p.name, "severity": p.severity, "description": p.description}
                 for p in patterns
             ]
         return result
```
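For orientation, a minimal usage sketch of the `LogAnalyzer` surface touched above, as it stands after this change; the sample log line is invented for illustration:

```python
from loglens.analyzers.analyzer import LogAnalyzer

# Hypothetical input; any list of raw log lines works.
lines = ['{"time": "2024-01-01T00:00:00", "level": "ERROR", "message": "Connection refused"}']

analyzer = LogAnalyzer()
result = analyzer.analyze(lines)  # format is auto-detected when not given
print(result.severity_breakdown)
print(result.suggestions)
```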
**loglens/analyzers/patterns.py**

```diff
@@ -2,12 +2,13 @@
 
 import re
 from dataclasses import dataclass, field
-from typing import Dict, List, Optional, Tuple
+from typing import Optional
 
 
 @dataclass
 class ErrorPattern:
     """Represents an error detection pattern."""
+
     name: str
     pattern: str
     severity: str = "error"
@@ -32,16 +33,17 @@ class ErrorPattern:
 @dataclass
 class PatternGroup:
     """Group of related patterns."""
+
     name: str
-    patterns: List[ErrorPattern] = field(default_factory=list)
+    patterns: list[ErrorPattern] = field(default_factory=list)
 
 
 class PatternLibrary:
     """Library of error detection patterns."""
 
     def __init__(self):
-        self._patterns: List[ErrorPattern] = []
-        self._groups: Dict[str, PatternGroup] = {}
+        self._patterns: list[ErrorPattern] = []
+        self._groups: dict[str, PatternGroup] = {}
         self._load_default_patterns()
 
     def _load_default_patterns(self) -> None:
@@ -53,7 +55,7 @@ class PatternLibrary:
                 severity="error",
                 description="Python exception detected",
                 suggestion="Check the exception type and stack trace to identify the root cause",
-                group="exceptions"
+                group="exceptions",
             ),
             ErrorPattern(
                 name="Java Stack Trace",
@@ -61,7 +63,7 @@ class PatternLibrary:
                 severity="error",
                 description="Java exception/stack trace detected",
                 suggestion="Review the Java stack trace for the exception cause",
-                group="exceptions"
+                group="exceptions",
             ),
             ErrorPattern(
                 name="Connection Refused",
@@ -69,7 +71,7 @@ class PatternLibrary:
                 severity="error",
                 description="Connection was refused",
                 suggestion="Check if the service is running and the port is correct",
-                group="network"
+                group="network",
             ),
             ErrorPattern(
                 name="Connection Timeout",
@@ -77,7 +79,7 @@ class PatternLibrary:
                 severity="error",
                 description="Connection timed out",
                 suggestion="Check network connectivity and server responsiveness",
-                group="network"
+                group="network",
             ),
             ErrorPattern(
                 name="Database Error",
@@ -85,7 +87,7 @@ class PatternLibrary:
                 severity="error",
                 description="Database error detected",
                 suggestion="Check database connectivity and query syntax",
-                group="database"
+                group="database",
             ),
             ErrorPattern(
                 name="SQL Error",
@@ -93,7 +95,7 @@ class PatternLibrary:
                 severity="error",
                 description="SQL error detected",
                 suggestion="Review the SQL query for syntax errors",
-                group="database"
+                group="database",
             ),
             ErrorPattern(
                 name="HTTP 5xx Error",
@@ -101,7 +103,7 @@ class PatternLibrary:
                 severity="error",
                 description="Server-side HTTP error",
                 suggestion="Check server logs for the root cause",
-                group="http"
+                group="http",
             ),
             ErrorPattern(
                 name="HTTP 4xx Error",
@@ -109,7 +111,7 @@ class PatternLibrary:
                 severity="warning",
                 description="Client-side HTTP error",
                 suggestion="Check request URL and parameters",
-                group="http"
+                group="http",
             ),
             ErrorPattern(
                 name="Null Pointer",
@@ -117,7 +119,7 @@ class PatternLibrary:
                 severity="error",
                 description="Null pointer/null reference error",
                 suggestion="Add null checks before accessing objects",
-                group="exceptions"
+                group="exceptions",
             ),
             ErrorPattern(
                 name="Index Error",
@@ -125,7 +127,7 @@ class PatternLibrary:
                 severity="error",
                 description="Index out of bounds error",
                 suggestion="Check array/list bounds before access",
-                group="exceptions"
+                group="exceptions",
             ),
             ErrorPattern(
                 name="Key Error",
@@ -133,7 +135,7 @@ class PatternLibrary:
                 severity="error",
                 description="Key not found in dictionary/map",
                 suggestion="Add key existence checks or use .get() method",
-                group="exceptions"
+                group="exceptions",
             ),
             ErrorPattern(
                 name="Permission Denied",
@@ -141,7 +143,7 @@ class PatternLibrary:
                 severity="error",
                 description="Permission denied error",
                 suggestion="Check file/directory permissions",
-                group="system"
+                group="system",
             ),
             ErrorPattern(
                 name="Disk Full",
@@ -149,7 +151,7 @@ class PatternLibrary:
                 severity="critical",
                 description="Disk space exhausted",
                 suggestion="Free up disk space or increase storage",
-                group="system"
+                group="system",
             ),
             ErrorPattern(
                 name="Memory Error",
@@ -157,7 +159,7 @@ class PatternLibrary:
                 severity="critical",
                 description="Out of memory error",
                 suggestion="Increase memory or optimize memory usage",
-                group="system"
+                group="system",
             ),
             ErrorPattern(
                 name="Segmentation Fault",
@@ -165,7 +167,7 @@ class PatternLibrary:
                 severity="critical",
                 description="Segmentation fault",
                 suggestion="Check for null pointer dereferences or buffer overflows",
-                group="system"
+                group="system",
             ),
             ErrorPattern(
                 name="Panic",
@@ -173,7 +175,7 @@ class PatternLibrary:
                 severity="critical",
                 description="Application panic",
                 suggestion="Review panic message and stack trace",
-                group="system"
+                group="system",
             ),
             ErrorPattern(
                 name="Deprecated",
@@ -181,7 +183,7 @@ class PatternLibrary:
                 severity="info",
                 description="Deprecated feature usage",
                 suggestion="Update to the recommended replacement",
-                group="code_quality"
+                group="code_quality",
             ),
             ErrorPattern(
                 name="Warning",
@@ -189,7 +191,7 @@ class PatternLibrary:
                 severity="warning",
                 description="General warning",
                 suggestion="Review warning message for potential issues",
-                group="general"
+                group="general",
             ),
             ErrorPattern(
                 name="Debug",
@@ -197,7 +199,7 @@ class PatternLibrary:
                 severity="debug",
                 description="Debug message",
                 suggestion="Ignore unless debugging",
-                group="general"
+                group="general",
             ),
             ErrorPattern(
                 name="Authentication Failed",
@@ -205,7 +207,7 @@ class PatternLibrary:
                 severity="error",
                 description="Authentication failure",
                 suggestion="Verify username/password or API key",
-                group="security"
+                group="security",
             ),
             ErrorPattern(
                 name="SSL/TLS Error",
@@ -213,46 +215,39 @@ class PatternLibrary:
                 severity="error",
                 description="SSL/TLS error",
                 suggestion="Check certificate validity and configuration",
-                group="security"
+                group="security",
             ),
         ]
 
         self._groups = {
             "exceptions": PatternGroup(
-                name="Exceptions",
-                patterns=[p for p in self._patterns if p.group == "exceptions"]
+                name="Exceptions", patterns=[p for p in self._patterns if p.group == "exceptions"]
             ),
             "network": PatternGroup(
-                name="Network",
-                patterns=[p for p in self._patterns if p.group == "network"]
+                name="Network", patterns=[p for p in self._patterns if p.group == "network"]
             ),
             "database": PatternGroup(
-                name="Database",
-                patterns=[p for p in self._patterns if p.group == "database"]
+                name="Database", patterns=[p for p in self._patterns if p.group == "database"]
             ),
             "http": PatternGroup(
-                name="HTTP",
-                patterns=[p for p in self._patterns if p.group == "http"]
+                name="HTTP", patterns=[p for p in self._patterns if p.group == "http"]
             ),
             "system": PatternGroup(
-                name="System",
-                patterns=[p for p in self._patterns if p.group == "system"]
+                name="System", patterns=[p for p in self._patterns if p.group == "system"]
             ),
             "security": PatternGroup(
-                name="Security",
-                patterns=[p for p in self._patterns if p.group == "security"]
+                name="Security", patterns=[p for p in self._patterns if p.group == "security"]
             ),
             "code_quality": PatternGroup(
                 name="Code Quality",
-                patterns=[p for p in self._patterns if p.group == "code_quality"]
+                patterns=[p for p in self._patterns if p.group == "code_quality"],
             ),
             "general": PatternGroup(
-                name="General",
-                patterns=[p for p in self._patterns if p.group == "general"]
+                name="General", patterns=[p for p in self._patterns if p.group == "general"]
             ),
         }
 
-    def detect(self, text: str) -> List[Tuple[ErrorPattern, re.Match]]:
+    def detect(self, text: str) -> list[tuple[ErrorPattern, re.Match]]:
         """Detect all patterns matching the text."""
         matches = []
         for pattern in self._patterns:
@@ -262,7 +257,7 @@ class PatternLibrary:
                 matches.append((pattern, match))
         return matches
 
-    def find_match(self, text: str) -> Optional[Tuple[ErrorPattern, re.Match]]:
+    def find_match(self, text: str) -> Optional[tuple[ErrorPattern, re.Match]]:
         """Find the first matching pattern."""
         for pattern in self._patterns:
             if pattern.enabled:
@@ -271,11 +266,11 @@ class PatternLibrary:
                     return (pattern, match)
         return None
 
-    def get_patterns_by_severity(self, severity: str) -> List[ErrorPattern]:
+    def get_patterns_by_severity(self, severity: str) -> list[ErrorPattern]:
         """Get patterns by severity level."""
         return [p for p in self._patterns if p.severity == severity]
 
-    def get_patterns_by_group(self, group: str) -> List[ErrorPattern]:
+    def get_patterns_by_group(self, group: str) -> list[ErrorPattern]:
         """Get patterns by group."""
         return [p for p in self._patterns if p.group == group]
 
@@ -307,10 +302,10 @@ class PatternLibrary:
             return True
         return False
 
-    def list_patterns(self) -> List[ErrorPattern]:
+    def list_patterns(self) -> list[ErrorPattern]:
         """List all patterns."""
         return self._patterns.copy()
 
-    def list_groups(self) -> Dict[str, List[ErrorPattern]]:
+    def list_groups(self) -> dict[str, list[ErrorPattern]]:
         """List patterns by group."""
         return {name: group.patterns.copy() for name, group in self._groups.items()}
```
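A quick sketch of how the `PatternLibrary` reformatted above is typically queried; the sample message is invented, and whether it matches depends on the pattern regexes (not shown in this diff):

```python
from loglens.analyzers.patterns import PatternLibrary

library = PatternLibrary()
# find_match returns Optional[tuple[ErrorPattern, re.Match]] per the new signature.
hit = library.find_match("FATAL: disk full on /var/log")
if hit:
    pattern, match = hit
    print(pattern.name, pattern.severity, pattern.suggestion)
```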
**loglens/analyzers/severity.py**

```diff
@@ -2,11 +2,12 @@
 
 from dataclasses import dataclass
 from enum import Enum
-from typing import Any, Dict, List, Optional
+from typing import Any, Optional
 
 
 class SeverityLevel(Enum):
     """Severity levels for log entries."""
+
     CRITICAL = "critical"
     ERROR = "error"
     WARNING = "warning"
@@ -31,7 +32,7 @@ class SeverityLevel(Enum):
             SeverityLevel.WARNING: 3,
             SeverityLevel.INFO: 2,
             SeverityLevel.DEBUG: 1,
-            SeverityLevel.UNKNOWN: 0
+            SeverityLevel.UNKNOWN: 0,
         }
         return scores.get(self, 0)
 
@@ -52,8 +53,9 @@
 @dataclass
 class SeverityRule:
     """Rule for severity classification."""
+
     name: str
-    patterns: List[str]
+    patterns: list[str]
     severity: SeverityLevel
     weight: int = 1
     description: str = ""
@@ -68,44 +70,44 @@ class SeverityClassifier:
            patterns=["fatal", "segfault", "panic", "core dumped", "critical system failure"],
            severity=SeverityLevel.CRITICAL,
            weight=10,
-            description="Critical system failures"
+            description="Critical system failures",
         ),
         SeverityRule(
             name="error_keywords",
             patterns=["error", "exception", "failed", "failure", "timeout", "cannot", "unable"],
             severity=SeverityLevel.ERROR,
             weight=5,
-            description="General errors"
+            description="General errors",
         ),
         SeverityRule(
             name="warning_keywords",
             patterns=["warning", "warn", "deprecated", "deprecation"],
             severity=SeverityLevel.WARNING,
             weight=3,
-            description="Warnings and deprecations"
+            description="Warnings and deprecations",
         ),
         SeverityRule(
             name="info_keywords",
             patterns=["info", "notice", "started", "stopped", "loaded"],
             severity=SeverityLevel.INFO,
             weight=1,
-            description="Informational messages"
+            description="Informational messages",
         ),
         SeverityRule(
             name="debug_keywords",
             patterns=["debug", "trace", "verbose"],
             severity=SeverityLevel.DEBUG,
             weight=0,
-            description="Debug and trace messages"
+            description="Debug and trace messages",
         ),
     ]
 
-    def __init__(self, custom_rules: Optional[List[Dict[str, Any]]] = None):
-        self.rules: List[SeverityRule] = self.DEFAULT_RULES.copy()
+    def __init__(self, custom_rules: Optional[list[dict[str, Any]]] = None):
+        self.rules: list[SeverityRule] = self.DEFAULT_RULES.copy()
         if custom_rules:
             self._load_custom_rules(custom_rules)
 
-    def _load_custom_rules(self, rules: List[Dict[str, Any]]) -> None:
+    def _load_custom_rules(self, rules: list[dict[str, Any]]) -> None:
         """Load custom severity rules."""
         for rule_data in rules:
             rule = SeverityRule(
@@ -113,11 +115,13 @@ class SeverityClassifier:
                 patterns=rule_data.get("patterns", []),
                 severity=SeverityLevel.from_string(rule_data.get("severity", "info")),
                 weight=rule_data.get("weight", 1),
-                description=rule_data.get("description", "")
+                description=rule_data.get("description", ""),
             )
             self.rules.append(rule)
 
-    def classify(self, level: Optional[str], message: str = "", pattern_match: Optional[str] = None) -> SeverityLevel:
+    def classify(
+        self, level: Optional[str], message: str = "", pattern_match: Optional[str] = None
+    ) -> SeverityLevel:
         """Classify severity based on level, message, and pattern."""
         score = 0
         matched_severity = SeverityLevel.UNKNOWN
@@ -159,7 +163,9 @@
 
         return SeverityLevel.INFO
 
-    def classify_with_details(self, level: Optional[str], message: str = "", pattern_match: Optional[str] = None) -> Dict[str, Any]:
+    def classify_with_details(
+        self, level: Optional[str], message: str = "", pattern_match: Optional[str] = None
+    ) -> dict[str, Any]:
         """Classify severity with detailed information."""
         severity = self.classify(level, message, pattern_match)
 
@@ -169,25 +175,33 @@ class SeverityClassifier:
         for rule in self.rules:
             for pattern in rule.patterns:
                 if pattern.lower() in text:
-                    matched_rules.append({
-                        "rule": rule.name,
-                        "pattern": pattern,
-                        "severity": rule.severity.value,
-                        "weight": rule.weight
-                    })
+                    matched_rules.append(
+                        {
+                            "rule": rule.name,
+                            "pattern": pattern,
+                            "severity": rule.severity.value,
+                            "weight": rule.weight,
+                        }
+                    )
 
         return {
             "severity": severity,
             "matched_rules": matched_rules,
-            "confidence": min(1.0, len(matched_rules) * 0.3) if matched_rules else 0.5
+            "confidence": min(1.0, len(matched_rules) * 0.3) if matched_rules else 0.5,
         }
 
-    def get_severity_order(self) -> List[SeverityLevel]:
+    def get_severity_order(self) -> list[SeverityLevel]:
         """Get severity levels in order from highest to lowest."""
         return sorted(
-            [SeverityLevel.CRITICAL, SeverityLevel.ERROR, SeverityLevel.WARNING,
-             SeverityLevel.INFO, SeverityLevel.DEBUG, SeverityLevel.UNKNOWN],
-            reverse=True
+            [
+                SeverityLevel.CRITICAL,
+                SeverityLevel.ERROR,
+                SeverityLevel.WARNING,
+                SeverityLevel.INFO,
+                SeverityLevel.DEBUG,
+                SeverityLevel.UNKNOWN,
+            ],
+            reverse=True,
         )
 
     def add_rule(self, rule: SeverityRule) -> None:
```
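And a sketch of the two classifier entry points whose signatures were re-wrapped above; the message text is invented, so the printed result depends on the keyword rules shown in the diff:

```python
from loglens.analyzers.severity import SeverityClassifier

classifier = SeverityClassifier()
level = classifier.classify(level=None, message="request failed: connection timeout")
details = classifier.classify_with_details(level=None, message="request failed: connection timeout")
print(level, details["matched_rules"], details["confidence"])
```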
**loglens/cli/__init__.py**

```diff
@@ -1,6 +1,6 @@
 """CLI package."""
 
+from loglens.cli.commands import analyze, report, watch
 from loglens.cli.main import main
-from loglens.cli.commands import analyze, watch, report
 
 __all__ = ["main", "analyze", "watch", "report"]
```
**loglens/cli/commands.py**

```diff
@@ -1,18 +1,18 @@
 """Click CLI commands for LogLens."""
 
+import logging
 import sys
 import time
-import logging
 from typing import Optional
 
 import click
 from colorlog import ColoredFormatter
 
-from loglens.parsers.base import LogFormat
 from loglens.analyzers.analyzer import LogAnalyzer
-from loglens.formatters.table_formatter import TableFormatter
 from loglens.formatters.json_formatter import JSONFormatter
+from loglens.formatters.table_formatter import TableFormatter
 from loglens.formatters.text_formatter import TextFormatter
+from loglens.parsers.base import LogFormat
 
 
 def setup_logging(verbosity: int = 0) -> None:
@@ -29,7 +29,7 @@ def setup_logging(verbosity: int = 0) -> None:
             "WARNING": "yellow",
             "ERROR": "red",
             "CRITICAL": "red,bg_white",
-        }
+        },
     )
 
     logger = logging.getLogger("loglens")
@@ -40,23 +40,34 @@ def setup_logging(verbosity: int = 0) -> None:
 @click.group()
 @click.option("--verbosity", "-v", count=True, help="Increase output verbosity")
 @click.option("--config", type=click.Path(exists=True), help="Path to config file")
 @click.version_option(version="0.1.0", prog_name="loglens")
 @click.pass_context
 def main(ctx: click.Context, verbosity: int, config: str) -> None:
     """LogLens - Parse, analyze, and summarize log files."""
-    setup_logging(verbosity)
+    from loglens import __version__
+
     ctx.ensure_object(dict)
     ctx.obj["config"] = config
+    ctx.obj["version"] = __version__
+    setup_logging(verbosity)
 
 
 @main.command("analyze")
 @click.argument("files", type=click.Path(exists=True), nargs=-1)
-@click.option("--format", type=click.Choice(["json", "syslog", "apache", "auto"]),
-              default="auto", help="Log format (auto-detect by default)")
-@click.option("--output", type=click.Choice(["table", "json", "text"]), default="table",
-              help="Output format")
+@click.option(
+    "--format",
+    type=click.Choice(["json", "syslog", "apache", "auto"]),
+    default="auto",
+    help="Log format (auto-detect by default)",
+)
+@click.option(
+    "--output", type=click.Choice(["table", "json", "text"]), default="table", help="Output format"
+)
 @click.option("--follow/--no-follow", default=False, help="Follow file changes")
 @click.option("--max-entries", type=int, default=100, help="Maximum entries to display")
-@click.option("--json/--no-json", default=False, help="Output as JSON (shorthand for --output json)")
+@click.option(
+    "--json/--no-json", default=False, help="Output as JSON (shorthand for --output json)"
+)
 @click.pass_context
 def analyze(
     ctx: click.Context,
@@ -65,7 +76,7 @@ def analyze(
     output: str,
     follow: bool,
     max_entries: int,
-    json: bool
+    json: bool,
 ) -> None:
     """Analyze log files and display summary."""
     if json:
@@ -97,7 +108,9 @@ def _analyze_lines(lines: list, format_str: str, output: str, max_entries: int)
     _display_result(result, output, max_entries)
 
 
-def _analyze_file(file_path: str, format_str: str, output: str, max_entries: int, follow: bool) -> None:
+def _analyze_file(
+    file_path: str, format_str: str, output: str, max_entries: int, follow: bool
+) -> None:
     """Analyze a single file."""
     format_enum = None if format_str == "auto" else LogFormat(format_str)
     analyzer = LogAnalyzer()
@@ -109,10 +122,15 @@ def _analyze_file(file_path: str, format_str: str, output: str, max_entries: int
     _display_result(result, output, max_entries)
 
 
-def _follow_file(file_path: str, analyzer: LogAnalyzer, format: Optional[LogFormat],
-                 output: str, max_entries: int) -> None:
+def _follow_file(
+    file_path: str,
+    analyzer: LogAnalyzer,
+    format: Optional[LogFormat],
+    output: str,
+    max_entries: int,
+) -> None:
     """Follow a file and analyze in real-time."""
-    with open(file_path, "r") as f:
+    with open(file_path) as f:
         f.seek(0, 2)
         buffer = []
 
@@ -150,18 +168,16 @@ def _display_result(result, output: str, max_entries: int) -> None:
 
 @main.command("watch")
 @click.argument("files", type=click.Path(exists=True), nargs=-1)
-@click.option("--format", type=click.Choice(["json", "syslog", "apache", "auto"]),
-              default="auto", help="Log format")
+@click.option(
+    "--format",
+    type=click.Choice(["json", "syslog", "apache", "auto"]),
+    default="auto",
+    help="Log format",
+)
 @click.option("--interval", type=float, default=1.0, help="Refresh interval in seconds")
 @click.option("--max-entries", type=int, default=50, help="Maximum entries per update")
 @click.pass_context
-def watch(
-    ctx: click.Context,
-    files: tuple,
-    format: str,
-    interval: float,
-    max_entries: int
-) -> None:
+def watch(ctx: click.Context, files: tuple, format: str, interval: float, max_entries: int) -> None:
     """Watch log files and display live updates."""
     if not files:
         click.echo("Error: No files specified for watching.")
@@ -189,17 +205,17 @@ def watch(
 
 @main.command("report")
 @click.argument("files", type=click.Path(exists=True), nargs=-1)
-@click.option("--format", type=click.Choice(["json", "syslog", "apache", "auto"]),
-              default="auto", help="Log format")
+@click.option(
+    "--format",
+    type=click.Choice(["json", "syslog", "apache", "auto"]),
+    default="auto",
+    help="Log format",
+)
 @click.option("--output", type=click.Path(), help="Output file path (default: stdout)")
 @click.option("--json/--no-json", default=False, help="Output as JSON")
 @click.pass_context
 def report(
-    ctx: click.Context,
-    files: tuple,
-    format: str,
-    output: Optional[str],
-    json: bool
+    ctx: click.Context, files: tuple, format: str, output: Optional[str], json: bool
 ) -> None:
     """Generate detailed analysis report."""
     if not files:
@@ -218,10 +234,7 @@ def report(
         formatter = JSONFormatter()
         report_data = {
             "files_analyzed": len(files),
-            "results": [
-                {"file": path, "analysis": result}
-                for path, result in all_results
-            ]
+            "results": [{"file": path, "analysis": result} for path, result in all_results],
         }
         report_text = formatter.format(report_data)
     else:
@@ -236,8 +249,10 @@ def report(
         lines.append(f"=== {file_path} ===")
         lines.append(f"Total Lines: {result.total_lines}")
         lines.append(f"Format: {result.format_detected.value}")
-        lines.append(f"Critical: {result.critical_count} | Error: {result.error_count} | "
-                     f"Warning: {result.warning_count} | Info: {result.debug_count}")
+        lines.append(
+            f"Critical: {result.critical_count} | Error: {result.error_count} | "
+            f"Warning: {result.warning_count} | Info: {result.debug_count}"
+        )
         lines.append("")
 
         if result.suggestions:
@@ -258,8 +273,11 @@ def report(
 
 @main.command("patterns")
 @click.option("--group", help="Filter by pattern group")
-@click.option("--severity", type=click.Choice(["critical", "error", "warning", "info", "debug"]),
-              help="Filter by severity")
+@click.option(
+    "--severity",
+    type=click.Choice(["critical", "error", "warning", "info", "debug"]),
+    help="Filter by severity",
+)
 @click.pass_context
 def patterns(ctx: click.Context, group: str, severity: str) -> None:
     """List available error detection patterns."""
@@ -286,11 +304,10 @@ def patterns(ctx: click.Context, group: str, severity: str) -> None:
             "error": "red",
             "warning": "yellow",
             "info": "blue",
-            "debug": "grey"
+            "debug": "grey",
         }.get(pattern["severity"], "white")
         formatter.console.print(
-            f"  [bold]{pattern['name']}[/] "
-            f"[{severity_color}]({pattern['severity']})[/]"
+            f"  [bold]{pattern['name']}[/] " f"[{severity_color}]({pattern['severity']})[/]"
         )
         if pattern["description"]:
            formatter.console.print(f"    {pattern['description']}")
```
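Since the option declarations above were only re-wrapped, a sketch of driving the `main` group through Click's test runner can confirm behavior is unchanged; `server.log` is the sample file name used in the README examples:

```python
from click.testing import CliRunner

from loglens.cli.commands import main

runner = CliRunner()
# Invoke the analyze subcommand the same way the shell would.
result = runner.invoke(main, ["analyze", "server.log", "--output", "json"])
print(result.exit_code, result.output)
```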
**loglens/cli/main.py**

```diff
@@ -1,9 +1,9 @@
 """Main CLI entry point."""
 
-from loglens.cli.commands import main
+import sys
 
-import sys
+from loglens.cli.commands import main
 
 
 def main_cli() -> int:
     """Main entry point for LogLens CLI."""
```
**loglens/formatters/__init__.py**

```diff
@@ -1,8 +1,8 @@
 """Output formatters package."""
 
 from loglens.formatters.base import OutputFormatter
-from loglens.formatters.table_formatter import TableFormatter
 from loglens.formatters.json_formatter import JSONFormatter
+from loglens.formatters.table_formatter import TableFormatter
 from loglens.formatters.text_formatter import TextFormatter
 
 __all__ = [
```
**loglens/formatters/base.py**

```diff
@@ -1,13 +1,13 @@
 """Base formatter class."""
 
 from abc import ABC, abstractmethod
-from typing import Any, Dict, List, TextIO
+from typing import Any, Optional, TextIO
 
 
 class OutputFormatter(ABC):
     """Abstract base class for output formatters."""
 
-    def __init__(self, output: TextIO = None):
+    def __init__(self, output: Optional[TextIO] = None):
         self.output = output
 
     @abstractmethod
```
**loglens/formatters/json_formatter.py**

```diff
@@ -1,11 +1,11 @@
 """JSON output formatter."""
 
 import json
-from typing import Any, Dict, List
+from typing import Any
 
 from loglens.analyzers.analyzer import AnalysisResult
-from loglens.parsers.base import ParsedLogEntry
 from loglens.formatters.base import OutputFormatter
+from loglens.parsers.base import ParsedLogEntry
 
 
 class JSONFormatter(OutputFormatter):
@@ -27,29 +27,29 @@ class JSONFormatter(OutputFormatter):
                 "total_lines": result.total_lines,
                 "parsed_entries": result.parsed_count,
                 "format_detected": result.format_detected.value,
-                "analysis_time": result.analysis_time.isoformat()
+                "analysis_time": result.analysis_time.isoformat(),
             },
             "severity_breakdown": result.severity_breakdown,
             "pattern_matches": result.pattern_matches,
             "top_errors": result.top_errors,
             "host_breakdown": result.host_breakdown,
-            "suggestions": result.suggestions
+            "suggestions": result.suggestions,
         }
 
         if result.time_range:
             output["time_range"] = {
                 "start": result.time_range[0].isoformat(),
-                "end": result.time_range[1].isoformat()
+                "end": result.time_range[1].isoformat(),
             }
 
         return json.dumps(output, default=str, indent=2)
 
-    def _format_entries(self, entries: List[ParsedLogEntry]) -> str:
+    def _format_entries(self, entries: list[ParsedLogEntry]) -> str:
         """Format log entries as JSON array."""
         output = [entry.to_dict() for entry in entries]
         return json.dumps(output, default=str, indent=2)
 
-    def format_entries_compact(self, entries: List[ParsedLogEntry]) -> str:
+    def format_entries_compact(self, entries: list[ParsedLogEntry]) -> str:
         """Format entries as compact JSON (one per line)."""
         lines = []
         for entry in entries:
```
**loglens/formatters/table_formatter.py**

```diff
@@ -1,17 +1,16 @@
 """Table formatter using Rich library."""
 
 from datetime import datetime
-from typing import Any, Dict, List, Optional
+from typing import Any, Optional
 
+from rich import box
 from rich.console import Console
+from rich.style import Style
 from rich.table import Table
 from rich.text import Text
-from rich import box
-from rich.style import Style
 
 from loglens.analyzers.analyzer import AnalysisResult
 from loglens.analyzers.severity import SeverityLevel
-from loglens.parsers.base import ParsedLogEntry
 from loglens.formatters.base import OutputFormatter
+from loglens.parsers.base import ParsedLogEntry
 
 
 class TableFormatter(OutputFormatter):
@@ -26,8 +25,12 @@ class TableFormatter(OutputFormatter):
         "unknown": Style(color="white"),
     }
 
-    def __init__(self, console: Console = None, show_timestamps: bool = True,
-                 max_entries: int = 100):
+    def __init__(
+        self,
+        console: Optional[Console] = None,
+        show_timestamps: bool = True,
+        max_entries: int = 100,
+    ):
         super().__init__()
         self.console = console or Console()
         self.show_timestamps = show_timestamps
@@ -44,8 +47,6 @@ class TableFormatter(OutputFormatter):
 
     def _format_analysis_result(self, result: AnalysisResult) -> str:
         """Format analysis result as summary table."""
-        output = []
-
         summary_table = Table(title="Log Analysis Summary", box=box.ROUNDED)
         summary_table.add_column("Metric", style="cyan")
         summary_table.add_column("Value", style="magenta")
@@ -69,11 +70,7 @@ class TableFormatter(OutputFormatter):
         for level in ["critical", "error", "warning", "info", "debug"]:
             count = getattr(result, f"{level}_count", 0)
             pct = (count / total) * 100
-            severity_table.add_row(
-                level.upper(),
-                str(count),
-                f"{pct:.1f}%"
-            )
+            severity_table.add_row(level.upper(), str(count), f"{pct:.1f}%")
 
         self.console.print(severity_table)
 
@@ -98,7 +95,7 @@ class TableFormatter(OutputFormatter):
 
         return ""
 
-    def _format_entries(self, entries: List[ParsedLogEntry]) -> str:
+    def _format_entries(self, entries: list[ParsedLogEntry]) -> str:
         """Format log entries as table."""
         table = Table(title="Log Entries", box=box.ROUNDED)
         table.add_column("#", justify="right", style="dim")
@@ -107,7 +104,7 @@ class TableFormatter(OutputFormatter):
         table.add_column("Severity", style="bold")
         table.add_column("Message", overflow="fold")
 
-        displayed = entries[:self.max_entries]
+        displayed = entries[: self.max_entries]
         for entry in displayed:
             row = [str(entry.line_number)]
 
@@ -125,17 +122,14 @@ class TableFormatter(OutputFormatter):
             table.add_row(*row)
 
         if len(entries) > self.max_entries:
-            table.add_row(
-                f"... and {len(entries) - self.max_entries} more",
-                "", "", ""
-            )
+            table.add_row(f"... and {len(entries) - self.max_entries} more", "", "", "")
 
         self.console.print(table)
         return ""
 
-    def format_entries_detailed(self, entries: List[ParsedLogEntry]) -> str:
+    def format_entries_detailed(self, entries: list[ParsedLogEntry]) -> str:
         """Format entries with full details."""
-        for entry in entries[:self.max_entries]:
+        for entry in entries[: self.max_entries]:
             self._print_entry_detailed(entry)
 
         return ""
@@ -143,7 +137,6 @@ class TableFormatter(OutputFormatter):
     def _print_entry_detailed(self, entry: ParsedLogEntry) -> None:
         """Print a single entry with full details."""
         from rich.panel import Panel
-        from rich.columns import Columns
 
         severity = entry.severity or "unknown"
         style = self.SEVERITY_STYLES.get(severity, self.SEVERITY_STYLES["unknown"])
@@ -174,10 +167,7 @@ class TableFormatter(OutputFormatter):
             content.append(f"  {key}: {value}")
 
         panel = Panel(
-            "\n".join(content),
-            title=f"Entry #{entry.line_number}",
-            style=style,
-            box=box.SIMPLE
+            "\n".join(content), title=f"Entry #{entry.line_number}", style=style, box=box.SIMPLE
         )
 
         self.console.print(panel)
```
**loglens/formatters/text_formatter.py**

```diff
@@ -1,11 +1,10 @@
 """Text output formatter."""
 
 from datetime import datetime
-from typing import Any, List
+from typing import Any
 
 from loglens.analyzers.analyzer import AnalysisResult
-from loglens.parsers.base import ParsedLogEntry
 from loglens.formatters.base import OutputFormatter
+from loglens.parsers.base import ParsedLogEntry
 
 
 class TextFormatter(OutputFormatter):
@@ -58,7 +57,7 @@ class TextFormatter(OutputFormatter):
 
         return "\n".join(lines)
 
-    def _format_entries(self, entries: List[ParsedLogEntry]) -> str:
+    def _format_entries(self, entries: list[ParsedLogEntry]) -> str:
         """Format log entries as text lines."""
         lines = []
         for entry in entries:
@@ -79,7 +78,7 @@ class TextFormatter(OutputFormatter):
 
         return "\n".join(lines)
 
-    def format_entries_compact(self, entries: List[ParsedLogEntry], max_lines: int = 100) -> str:
+    def format_entries_compact(self, entries: list[ParsedLogEntry], max_lines: int = 100) -> str:
         """Format entries compactly."""
         lines = []
         for entry in entries[:max_lines]:
```
**loglens/parsers/__init__.py**

```diff
@@ -1,10 +1,10 @@
 """Log parsers package."""
 
+from loglens.parsers.apache_parser import ApacheParser
 from loglens.parsers.base import LogParser, ParsedLogEntry
+from loglens.parsers.factory import ParserFactory
 from loglens.parsers.json_parser import JSONParser
 from loglens.parsers.syslog_parser import SyslogParser
-from loglens.parsers.apache_parser import ApacheParser
-from loglens.parsers.factory import ParserFactory
 
 __all__ = [
     "LogParser",
```
**loglens/parsers/apache_parser.py**

```diff
@@ -2,8 +2,8 @@
 
 import re
 from datetime import datetime
-from typing import Any, Dict, List, Match, Optional
-from urllib.parse import parse_qs, unquote
+from re import Match
+from typing import Any, Optional
 
 from loglens.parsers.base import LogParser, ParsedLogEntry
 
@@ -26,7 +26,7 @@ class ApacheParser(LogParser):
     )
 
     ERROR_PATTERN = re.compile(
-        r'^\[[A-Z][a-z]{2}\s+[A-Z][a-z]{2}\s+\d{1,2}\s+\d{2}:\d{2}:\d{2}(?:\.\d+)?\s+\d{4})\]\s+\[([^\]:]+):([^\]]+)\]\s+(?:\[pid\s+(\d+)\]\s+)?(?P<message>.*)$'
+        r"^\[([A-Z][a-z]{2}\s+[A-Z][a-z]{2}\s+\d{1,2}\s+\d{2}:\d{2}:\d{2}(?:\.\d+)?\s+\d{4})\]\s+\[([^\]:]+):([^\]]+)\]\s+(?:\[pid\s+(\d+)\]\s+)?(?P<message>.*)$"
     )
 
     STATUS_CODES = {
@@ -34,7 +34,7 @@ class ApacheParser(LogParser):
         "2xx": "success",
         "3xx": "redirection",
         "4xx": "client_error",
-        "5xx": "server_error"
+        "5xx": "server_error",
     }
 
     def __init__(self, custom_format: Optional[str] = None):
@@ -47,15 +47,17 @@ class ApacheParser(LogParser):
             self.custom_pattern = None
             return
 
-        pattern_str = format_str.replace("%h", r"(?P<ip>\S+)") \
-            .replace("%l", r"\S+") \
-            .replace("%u", r"\S+") \
-            .replace("%t", r"\[(?P<timestamp>.*?)\]") \
-            .replace("%r", r'"(?P<method>\S+)\s+(?P<path>.*?)\s+(?P<protocol>\S+)"') \
-            .replace("%s", r"(?P<status>\d{3})") \
-            .replace("%b", r"(?P<size>\S+)") \
-            .replace("%{Referer}i", r'"(?P<referer>.*?)"') \
-            .replace("%{User-agent}i", r'"(?P<user_agent>.*?)"')
+        pattern_str = (
+            format_str.replace("%h", r"(?P<ip>\S+)")
+            .replace("%l", r"\S+")
+            .replace("%u", r"\S+")
+            .replace("%t", r"\[(?P<timestamp>.*?)\]")
+            .replace("%r", r'"(?P<method>\S+)\s+(?P<path>.*?)\s+(?P<protocol>\S+)"')
+            .replace("%s", r"(?P<status>\d{3})")
+            .replace("%b", r"(?P<size>\S+)")
+            .replace("%{Referer}i", r'"(?P<referer>.*?)"')
+            .replace("%{User-agent}i", r'"(?P<user_agent>.*?)"')
+        )
 
         try:
             self.custom_pattern = re.compile("^" + pattern_str)
@@ -87,10 +89,7 @@ class ApacheParser(LogParser):
         if not line:
             return None
 
-        entry = ParsedLogEntry(
-            raw_line=line,
-            line_number=line_number
-        )
+        entry = ParsedLogEntry(raw_line=line, line_number=line_number)
 
         parsed = None
 
@@ -128,7 +127,7 @@ class ApacheParser(LogParser):
 
         return entry
 
-    def _extract_from_match(self, match: Match, line: str) -> Dict[str, Any]:
+    def _extract_from_match(self, match: Match, line: str) -> dict[str, Any]:
         """Extract data from regex match."""
         result = {}
 
@@ -143,7 +142,7 @@ class ApacheParser(LogParser):
 
         if "request" in groups:
             request = groups["request"]
-            request_match = re.match(r'(?P<method>\S+)\s+(?P<path>.*)', request)
+            request_match = re.match(r"(?P<method>\S+)\s+(?P<path>.*)", request)
             if request_match:
                 result["method"] = request_match.group("method")
                 result["path"] = request_match.group("path")
@@ -174,7 +173,7 @@ class ApacheParser(LogParser):
 
         return result
 
-    def _extract_error_from_match(self, match: Match, line: str) -> Dict[str, Any]:
+    def _extract_error_from_match(self, match: Match, line: str) -> dict[str, Any]:
         """Extract data from error log match."""
         groups = match.groupdict()
 
```
**loglens/parsers/base.py**

```diff
@@ -3,12 +3,13 @@
 from abc import ABC, abstractmethod
 from dataclasses import dataclass, field
 from datetime import datetime
-from typing import Any, Dict, List, Optional
 from enum import Enum
+from typing import Any, Optional
 
 
 class LogFormat(Enum):
     """Supported log formats."""
+
     JSON = "json"
     SYSLOG = "syslog"
     APACHE = "apache"
@@ -18,6 +19,7 @@
 @dataclass
 class ParsedLogEntry:
     """Represents a parsed log entry."""
+
     raw_line: str
     timestamp: Optional[datetime] = None
     level: Optional[str] = None
@@ -27,11 +29,11 @@ class ParsedLogEntry:
     facility: Optional[str] = None
     severity: Optional[str] = None
     logger: Optional[str] = None
-    extra: Dict[str, Any] = field(default_factory=dict)
+    extra: dict[str, Any] = field(default_factory=dict)
     line_number: int = 0
     error_pattern: Optional[str] = None
 
-    def to_dict(self) -> Dict[str, Any]:
+    def to_dict(self) -> dict[str, Any]:
         """Convert to dictionary."""
         result = {
             "raw_line": self.raw_line,
@@ -74,7 +76,7 @@ class LogParser(ABC):
         """Check if this parser can handle the given line."""
         pass
 
-    def parse_batch(self, lines: List[str]) -> List[ParsedLogEntry]:
+    def parse_batch(self, lines: list[str]) -> list[ParsedLogEntry]:
         """Parse multiple lines."""
         results = []
         for i, line in enumerate(lines, 1):
@@ -83,9 +85,5 @@ class LogParser(ABC):
                 if entry:
                     results.append(entry)
             except Exception:
-                results.append(ParsedLogEntry(
-                    raw_line=line,
-                    message="Parse error",
-                    line_number=i
-                ))
+                results.append(ParsedLogEntry(raw_line=line, message="Parse error", line_number=i))
         return results
```
**loglens/parsers/factory.py**

```diff
@@ -1,23 +1,23 @@
 """Parser factory for automatic format detection."""
 
-from typing import Dict, List, Optional, Type
+from typing import Optional
 
+from loglens.parsers.apache_parser import ApacheParser
 from loglens.parsers.base import LogFormat, LogParser, ParsedLogEntry
 from loglens.parsers.json_parser import JSONParser
 from loglens.parsers.syslog_parser import SyslogParser
-from loglens.parsers.apache_parser import ApacheParser
 
 
 class ParserFactory:
     """Factory for creating and selecting log parsers."""
 
     def __init__(self):
-        self.parsers: Dict[LogFormat, Type[LogParser]] = {
+        self.parsers: dict[LogFormat, type[LogParser]] = {
             LogFormat.JSON: JSONParser,
             LogFormat.SYSLOG: SyslogParser,
             LogFormat.APACHE: ApacheParser,
         }
-        self._parser_instances: Dict[LogFormat, LogParser] = {}
+        self._parser_instances: dict[LogFormat, LogParser] = {}
 
     def get_parser(self, format: LogFormat) -> LogParser:
         """Get parser instance for specified format."""
@@ -43,32 +43,38 @@ class ParserFactory:
 
         return LogFormat.UNKNOWN
 
-    def detect_format_batch(self, lines: List[str], sample_size: int = 10) -> LogFormat:
+    def detect_format_batch(self, lines: list[str], sample_size: int = 10) -> LogFormat:
         """Detect format from multiple lines."""
         sample = lines[:sample_size] if len(lines) > sample_size else lines
 
         if not sample:
             return LogFormat.UNKNOWN
 
-        format_counts: Dict[LogFormat, int] = {
+        format_counts: dict[LogFormat, int] = {
             LogFormat.JSON: 0,
             LogFormat.SYSLOG: 0,
             LogFormat.APACHE: 0,
-            LogFormat.UNKNOWN: 0
+            LogFormat.UNKNOWN: 0,
         }
 
         for line in sample:
             format_detected = self.detect_format(line)
             format_counts[format_detected] += 1
 
-        if format_counts[LogFormat.JSON] > format_counts[LogFormat.SYSLOG] and \
-           format_counts[LogFormat.JSON] > format_counts[LogFormat.APACHE]:
+        if (
+            format_counts[LogFormat.JSON] > format_counts[LogFormat.SYSLOG]
+            and format_counts[LogFormat.JSON] > format_counts[LogFormat.APACHE]
+        ):
             return LogFormat.JSON
-        elif format_counts[LogFormat.SYSLOG] > format_counts[LogFormat.JSON] and \
-             format_counts[LogFormat.SYSLOG] > format_counts[LogFormat.APACHE]:
+        elif (
+            format_counts[LogFormat.SYSLOG] > format_counts[LogFormat.JSON]
+            and format_counts[LogFormat.SYSLOG] > format_counts[LogFormat.APACHE]
+        ):
             return LogFormat.SYSLOG
-        elif format_counts[LogFormat.APACHE] > format_counts[LogFormat.JSON] and \
-             format_counts[LogFormat.APACHE] > format_counts[LogFormat.SYSLOG]:
+        elif (
+            format_counts[LogFormat.APACHE] > format_counts[LogFormat.JSON]
+            and format_counts[LogFormat.APACHE] > format_counts[LogFormat.SYSLOG]
+        ):
             return LogFormat.APACHE
 
         if format_counts[LogFormat.JSON] > 0:
@@ -80,21 +86,22 @@ class ParserFactory:
 
         return LogFormat.UNKNOWN
 
-    def parse_lines(self, lines: List[str], format: Optional[LogFormat] = None) -> List[ParsedLogEntry]:
+    def parse_lines(
+        self, lines: list[str], format: Optional[LogFormat] = None
+    ) -> list[ParsedLogEntry]:
         """Parse lines with automatic format detection."""
         if format is None:
             format = self.detect_format_batch(lines)
 
         if format == LogFormat.UNKNOWN:
-            return [ParsedLogEntry(
-                raw_line=line,
-                message="Unknown format",
-                line_number=i + 1
-            ) for i, line in enumerate(lines)]
+            return [
+                ParsedLogEntry(raw_line=line, message="Unknown format", line_number=i + 1)
+                for i, line in enumerate(lines)
+            ]
 
         parser = self.get_parser(format)
         return parser.parse_batch(lines)
 
-    def get_available_formats(self) -> List[LogFormat]:
+    def get_available_formats(self) -> list[LogFormat]:
         """Get list of available log formats."""
         return list(self.parsers.keys())
```
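A minimal sketch of the detection path that was re-wrapped above; the sample lines are invented, and the printed check simply exercises the per-line majority vote in `detect_format_batch`:

```python
from loglens.parsers.base import LogFormat
from loglens.parsers.factory import ParserFactory

factory = ParserFactory()
sample = ['{"level": "info", "msg": "started"}', '{"level": "error", "msg": "boom"}']
print(factory.detect_format_batch(sample) is LogFormat.JSON)
entries = factory.parse_lines(sample)  # detection plus parse_batch in one call
print([entry.level for entry in entries])
```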
@@ -1,8 +1,8 @@
 """JSON log parser."""

 import re
 from datetime import datetime
-from typing import Any, Dict, List, Optional
+from typing import Any, Optional

 import orjson

 from loglens.parsers.base import LogParser, ParsedLogEntry
@@ -15,8 +15,15 @@ class JSONParser(LogParser):

     def __init__(self):
         self.timestamp_fields = [
-            "@timestamp", "timestamp", "time", "date", "datetime",
-            "created_at", "updated_at", "log_time", "event_time"
+            "@timestamp",
+            "timestamp",
+            "time",
+            "date",
+            "datetime",
+            "created_at",
+            "updated_at",
+            "log_time",
+            "event_time",
         ]
         self.level_fields = ["level", "severity", "log_level", "priority", "levelname"]
         self.message_fields = ["message", "msg", "log", "text", "content"]
@@ -48,32 +55,32 @@ class JSONParser(LogParser):
                 raw_line=line,
                 message=f"JSON parse error: {str(e)}",
                 line_number=line_number,
-                severity="error"
+                severity="error",
             )

-        entry = ParsedLogEntry(
-            raw_line=line,
-            line_number=line_number
-        )
+        entry = ParsedLogEntry(raw_line=line, line_number=line_number)

         if isinstance(data, dict):
             entry.timestamp = self._extract_timestamp(data)
             entry.level = self._extract_field(data, self.level_fields)
             entry.message = self._extract_field(data, self.message_fields)
             entry.logger = self._extract_field(data, self.logger_fields)
-            entry.extra = {k: v for k, v in data.items()
+            entry.extra = {
+                k: v
+                for k, v in data.items()
                 if k not in self.timestamp_fields
                 and k not in self.level_fields
                 and k not in self.message_fields
                 and k not in self.logger_fields
-                and not k.startswith("_")}
+                and not k.startswith("_")
+            }
         elif isinstance(data, list):
             entry.message = str(data)
             entry.extra = {"array_length": len(data)}

         return entry

-    def _extract_timestamp(self, data: Dict[str, Any]) -> Optional[datetime]:
+    def _extract_timestamp(self, data: dict[str, Any]) -> Optional[datetime]:
         """Extract timestamp from data dict."""
         for field in self.timestamp_fields:
             if field in data:
@@ -87,7 +94,7 @@ class JSONParser(LogParser):
                 pass
         return None

-    def _extract_field(self, data: Dict[str, Any], fields: List[str]) -> Optional[str]:
+    def _extract_field(self, data: dict[str, Any], fields: list[str]) -> Optional[str]:
         """Extract first matching field from data."""
         for field in fields:
             if field in data and data[field] is not None:
@@ -97,7 +104,7 @@ class JSONParser(LogParser):
             return str(value)
         return None

-    def parse_batch(self, lines: List[str]) -> List[ParsedLogEntry]:
+    def parse_batch(self, lines: list[str]) -> list[ParsedLogEntry]:
         """Parse multiple lines, handling multi-line JSON."""
         results = []
         buffer = ""
@@ -123,12 +130,14 @@ class JSONParser(LogParser):
             except orjson.JSONDecodeError:
                 if line_stripped.startswith("{") or line_stripped.startswith("["):
                     if line_stripped.endswith("}") or line_stripped.endswith("]"):
-                        results.append(ParsedLogEntry(
+                        results.append(
+                            ParsedLogEntry(
                                 raw_line=line,
                                 message="Invalid JSON",
                                 line_number=line_number,
-                            severity="error"
-                        ))
+                                severity="error",
+                            )
+                        )
                         buffer = ""
                 elif buffer.endswith("}") or buffer.endswith("]"):
                     try:
@@ -136,41 +145,45 @@ class JSONParser(LogParser):
                         entry = self._create_entry_from_data(data, buffer, line_number)
                         results.append(entry)
                     except orjson.JSONDecodeError:
-                        results.append(ParsedLogEntry(
+                        results.append(
+                            ParsedLogEntry(
                                 raw_line=buffer,
                                 message="Invalid JSON",
                                 line_number=line_number,
-                            severity="error"
-                        ))
+                                severity="error",
+                            )
+                        )
                         buffer = ""
                 elif len(buffer) > 10000:
-                    results.append(ParsedLogEntry(
+                    results.append(
+                        ParsedLogEntry(
                             raw_line=buffer[:100] + "...",
                             message="JSON too large to parse",
                             line_number=line_number,
-                        severity="error"
-                    ))
+                            severity="error",
+                        )
+                    )
                     buffer = ""

         return results

     def _create_entry_from_data(self, data: Any, raw_line: str, line_number: int) -> ParsedLogEntry:
         """Create ParsedLogEntry from parsed JSON data."""
-        entry = ParsedLogEntry(
-            raw_line=raw_line,
-            line_number=line_number
-        )
+        entry = ParsedLogEntry(raw_line=raw_line, line_number=line_number)

         if isinstance(data, dict):
             entry.timestamp = self._extract_timestamp(data)
             entry.level = self._extract_field(data, self.level_fields)
             entry.message = self._extract_field(data, self.message_fields)
             entry.logger = self._extract_field(data, self.logger_fields)
-            entry.extra = {k: v for k, v in data.items()
+            entry.extra = {
+                k: v
+                for k, v in data.items()
                 if k not in self.timestamp_fields
                 and k not in self.level_fields
                 and k not in self.message_fields
-                and k not in self.logger_fields}
+                and k not in self.logger_fields
+            }
         else:
             entry.message = str(data)
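The comprehension rewrite in this file is likewise behavior-preserving: any key not claimed as a timestamp, level, message, or logger field, and not prefixed with `_`, lands in `entry.extra`. A standalone sketch of that filtering rule (the field lists are copied from the `__init__` hunk above; `logger_fields` is not shown in the diff, so its contents here are an assumption):

```python
# Field lists as defined in JSONParser.__init__ above.
timestamp_fields = [
    "@timestamp", "timestamp", "time", "date", "datetime",
    "created_at", "updated_at", "log_time", "event_time",
]
level_fields = ["level", "severity", "log_level", "priority", "levelname"]
message_fields = ["message", "msg", "log", "text", "content"]
logger_fields = ["logger", "name"]  # assumed; not visible in this diff

data = {
    "timestamp": "2024-01-15T10:30:00",
    "level": "ERROR",
    "message": "disk full",
    "request_id": "abc-123",   # survives the filter
    "_internal": "hidden",     # dropped: leading underscore
}

extra = {
    k: v
    for k, v in data.items()
    if k not in timestamp_fields
    and k not in level_fields
    and k not in message_fields
    and k not in logger_fields
    and not k.startswith("_")
}
print(extra)  # {'request_id': 'abc-123'}
```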
@@ -2,7 +2,8 @@

 import re
 from datetime import datetime
-from typing import Any, Dict, List, Match, Optional
+from typing import Any, Optional

 from dateutil import parser as date_parser
+
 from loglens.parsers.base import LogParser, ParsedLogEntry
@@ -14,11 +15,11 @@ class SyslogParser(LogParser):
     format_name = "syslog"

     SYSLOG_RFC3164_PATTERN = re.compile(
-        r'^(?P<month>[A-Z][a-z]{2})\s+(?P<day>\d{1,2})\s+(?P<hour>\d{2}):(?P<minute>\d{2}):(?P<second>\d{2})\s+(?P<hostname>[\w.-]+)\s+(?P<process>[\w\[\]]+):\s*(?P<message>.*)$'
+        r"^(?P<month>[A-Z][a-z]{2})\s+(?P<day>\d{1,2})\s+(?P<hour>\d{2}):(?P<minute>\d{2}):(?P<second>\d{2})\s+(?P<hostname>[\w.-]+)\s+(?P<process>[\w\[\]]+):\s*(?P<message>.*)$"
     )

     SYSLOG_RFC5424_PATTERN = re.compile(
-        r'^(?P<pri><\d+>)?(?P<version>\d+)\s+(?P<timestamp>\S+)\s+(?P<hostname>\S+)\s+(?P<process>\S+)\s+(?P<pid>\S+)\s+(?P<msgid>\S+)?\s*(?P<struct_data>-)\s*(?P<message>.*)$'
+        r"^(?P<pri><\d+>)?(?P<version>\d+)\s+(?P<timestamp>\S+)\s+(?P<hostname>\S+)\s+(?P<process>\S+)\s+(?P<pid>\S+)\s+(?P<msgid>\S+)?\s*(?P<struct_data>-)\s*(?P<message>.*)$"
     )

     PRIORITY_MAP = {
@@ -29,7 +30,7 @@ class SyslogParser(LogParser):
         4: "warning",
         5: "notice",
         6: "info",
-        7: "debug"
+        7: "debug",
     }

     FACILITY_MAP = {
@@ -56,13 +57,23 @@ class SyslogParser(LogParser):
         20: "local4",
         21: "local5",
         22: "local6",
-        23: "local7"
+        23: "local7",
     }

     def __init__(self):
         self.month_map = {
-            "Jan": 1, "Feb": 2, "Mar": 3, "Apr": 4, "May": 5, "Jun": 6,
-            "Jul": 7, "Aug": 8, "Sep": 9, "Oct": 10, "Nov": 11, "Dec": 12
+            "Jan": 1,
+            "Feb": 2,
+            "Mar": 3,
+            "Apr": 4,
+            "May": 5,
+            "Jun": 6,
+            "Jul": 7,
+            "Aug": 8,
+            "Sep": 9,
+            "Oct": 10,
+            "Nov": 11,
+            "Dec": 12,
         }

     def can_parse(self, line: str) -> bool:
@@ -90,10 +101,7 @@ class SyslogParser(LogParser):
         if not line:
             return None

-        entry = ParsedLogEntry(
-            raw_line=line,
-            line_number=line_number
-        )
+        entry = ParsedLogEntry(raw_line=line, line_number=line_number)

         if line.startswith("<"):
             parsed = self._parse_rfc5424(line)
@@ -110,7 +118,7 @@ class SyslogParser(LogParser):

         return entry

-    def _parse_rfc3164(self, line: str) -> Optional[Dict[str, Any]]:
+    def _parse_rfc3164(self, line: str) -> Optional[dict[str, Any]]:
         """Parse RFC 3164 syslog format."""
         match = self.SYSLOG_RFC3164_PATTERN.match(line)
         if not match:
@@ -126,9 +134,7 @@ class SyslogParser(LogParser):
         message = match.group("message")

         current_year = datetime.now().year
-        timestamp = datetime(
-            current_year, self.month_map[month], day, hour, minute, second
-        )
+        timestamp = datetime(current_year, self.month_map[month], day, hour, minute, second)

         level = self._infer_level(message)

@@ -137,23 +143,23 @@ class SyslogParser(LogParser):
             "hostname": hostname,
             "process": process,
             "message": message,
-            "level": level
+            "level": level,
         }

-    def _parse_rfc5424(self, line: str) -> Optional[Dict[str, Any]]:
+    def _parse_rfc5424(self, line: str) -> Optional[dict[str, Any]]:
         """Parse RFC 5424 syslog format."""
         match = self.SYSLOG_RFC5424_PATTERN.match(line)
         if not match:
             return None

         raw_pri = match.group("pri")
-        version = match.group("version")
+        _ = match.group("version")
         timestamp_str = match.group("timestamp")
         hostname = match.group("hostname")
         process = match.group("process")
         pid = match.group("pid")
-        msgid = match.group("msgid")
-        struct_data = match.group("struct_data")
+        _ = match.group("msgid")
+        _ = match.group("struct_data")
         message = match.group("message")

         try:
@@ -163,7 +169,6 @@ class SyslogParser(LogParser):

         priority = None
         facility = None
-        level = None
         if raw_pri:
             pri_num = int(raw_pri[1:-1])
             priority = pri_num & 0x07
@@ -180,7 +185,7 @@ class SyslogParser(LogParser):
             "process": f"{process}[{pid}]" if pid else process,
             "message": message,
             "level": level,
-            "facility": facility
+            "facility": facility,
         }

     def _infer_level(self, message: str) -> Optional[str]:
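Context for the `raw_pri` handling above: an RFC 5424 `<PRI>` prefix packs facility and severity into a single integer, which is why the code masks with `0x07`. A quick illustration, hedged where the diff is silent (severity names 0-3 are the standard RFC values, not shown in the hunk; the `>> 3` facility extraction is the standard rule and is assumed here, since the diff never shows that line):

```python
# Severity half of the PRI value; entries 4-7 match the PRIORITY_MAP hunk
# above, 0-3 are the standard RFC 5424 names (assumed, not shown in the diff).
PRIORITY_MAP = {
    0: "emergency", 1: "alert", 2: "critical", 3: "error",
    4: "warning", 5: "notice", 6: "info", 7: "debug",
}

raw_pri = "<165>"             # e.g. from '<165>1 2024-01-15T10:30:00Z host app ...'
pri_num = int(raw_pri[1:-1])  # 165

severity_code = pri_num & 0x07  # 165 & 7 == 5  -> "notice"
facility_code = pri_num >> 3    # 165 >> 3 == 20 -> "local4" in FACILITY_MAP

print(severity_code, PRIORITY_MAP[severity_code], facility_code)
```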
@@ -45,14 +45,14 @@ def sample_apache_logs():
 def error_logs():
     """Sample error log lines."""
     return [
-        'Jan 15 10:30:00 server-01 app[1234]: Traceback (most recent call last):',
+        "Jan 15 10:30:00 server-01 app[1234]: Traceback (most recent call last):",
         'Jan 15 10:30:01 server-01 app[1234]: File "main.py", line 42, in <module>',
-        'Jan 15 10:30:02 server-01 app[1234]: result = process_data(data)',
+        "Jan 15 10:30:02 server-01 app[1234]: result = process_data(data)",
         'Jan 15 10:30:03 server-01 app[1234]: File "main.py", line 100, in process_data',
-        'Jan 15 10:30:04 server-01 app[1234]: KeyError: "Missing required key \'id\'"',
+        "Jan 15 10:30:04 server-01 app[1234]: KeyError: \"Missing required key 'id'\"",
         '{"level": "ERROR", "message": "NullPointerException: Cannot call method on null object"}',
         '{"level": "ERROR", "message": "HTTP Error 500: Internal Server Error"}',
-        'Jan 15 10:30:05 server-01 kernel: [1234.567] Out of memory: Kill process 1234 (app)',
+        "Jan 15 10:30:05 server-01 kernel: [1234.567] Out of memory: Kill process 1234 (app)",
     ]
@@ -60,4 +60,5 @@ def error_logs():
 def analyzer():
     """Log analyzer instance."""
     from loglens.analyzers.analyzer import LogAnalyzer
+
     return LogAnalyzer()
@@ -1,13 +1,8 @@
 """Integration tests for LogLens."""

-import pytest
-import tempfile
-from pathlib import Path
-
-from loglens.parsers.base import LogFormat
 from loglens.analyzers.analyzer import LogAnalyzer
 from loglens.formatters.table_formatter import TableFormatter
 from loglens.formatters.json_formatter import JSONFormatter
+from loglens.parsers.base import LogFormat


 class TestEndToEnd:
@@ -48,10 +43,10 @@ Jan 15 10:30:02 server-01 app[1234]: WARNING: High memory usage

     def test_apache_file_analysis(self, tmp_path):
         """Test complete Apache file analysis."""
-        log_content = '''192.168.1.1 - - [15/Jan/2024:10:30:00 +0000] "GET /api/users HTTP/1.1" 200 1234
+        log_content = """192.168.1.1 - - [15/Jan/2024:10:30:00 +0000] "GET /api/users HTTP/1.1" 200 1234
 192.168.1.2 - - [15/Jan/2024:10:30:01 +0000] "POST /api/login HTTP/1.1" 401 567
 192.168.1.3 - - [15/Jan/2024:10:30:02 +0000] "GET /api/orders HTTP/1.1" 500 4321
-'''
+"""
         log_file = tmp_path / "apache.log"
         log_file.write_text(log_content)
@@ -1,8 +1,6 @@
 """Unit tests for log analyzer."""

-import pytest
 from loglens.analyzers.analyzer import LogAnalyzer, AnalysisResult
-from loglens.analyzers.patterns import PatternLibrary, ErrorPattern
+from loglens.analyzers.patterns import ErrorPattern, PatternLibrary
 from loglens.analyzers.severity import SeverityClassifier, SeverityLevel
 from loglens.parsers.base import LogFormat
@@ -63,9 +61,7 @@ class TestPatternLibrary:
         """Test adding custom pattern."""
         library = PatternLibrary()
         custom = ErrorPattern(
-            name="Custom Error",
-            pattern="UNIQUE_CUSTOM_PATTERN_12345",
-            severity="error"
+            name="Custom Error", pattern="UNIQUE_CUSTOM_PATTERN_12345", severity="error"
         )
         library.add_pattern(custom)
@@ -1,12 +1,11 @@
 """Unit tests for CLI commands."""

+from unittest.mock import MagicMock, patch
+
 import pytest
 from click.testing import CliRunner
-from unittest.mock import patch, MagicMock
-import sys
-from io import StringIO

-from loglens.cli.commands import analyze, watch, report, patterns, info
+from loglens.cli.commands import analyze, info, patterns, report, watch


 @pytest.fixture
@@ -18,7 +17,7 @@ def runner():
 class TestAnalyzeCommand:
     """Tests for analyze command."""

-    @patch('loglens.cli.commands.LogAnalyzer')
+    @patch("loglens.cli.commands.LogAnalyzer")
     def test_analyze_file(self, mock_analyzer_class, runner, tmp_path):
         """Test analyzing a log file."""
         mock_analyzer = MagicMock()
@@ -43,7 +42,7 @@ class TestAnalyzeCommand:

         assert result.exit_code == 0

-    @patch('loglens.cli.commands.LogAnalyzer')
+    @patch("loglens.cli.commands.LogAnalyzer")
     def test_analyze_json_output(self, mock_analyzer_class, runner, tmp_path):
         """Test analyze with JSON output."""
         mock_analyzer = MagicMock()
@@ -90,7 +89,7 @@ class TestWatchCommand:
 class TestReportCommand:
     """Tests for report command."""

-    @patch('loglens.cli.commands.LogAnalyzer')
+    @patch("loglens.cli.commands.LogAnalyzer")
     def test_report_to_file(self, mock_analyzer_class, runner, tmp_path):
         """Test report generation to file."""
         mock_analyzer = MagicMock()
@@ -118,7 +117,7 @@ class TestReportCommand:
 class TestPatternsCommand:
     """Tests for patterns command."""

-    @patch('loglens.cli.commands.LogAnalyzer')
+    @patch("loglens.cli.commands.LogAnalyzer")
     def test_list_patterns(self, mock_analyzer_class, runner):
         """Test listing patterns."""
         mock_analyzer = MagicMock()
@@ -1,11 +1,10 @@
 """Unit tests for log parsers."""

-import pytest
-from loglens.parsers.base import LogFormat, ParsedLogEntry
+from loglens.parsers.apache_parser import ApacheParser
+from loglens.parsers.base import LogFormat
+from loglens.parsers.factory import ParserFactory
 from loglens.parsers.json_parser import JSONParser
 from loglens.parsers.syslog_parser import SyslogParser
-from loglens.parsers.apache_parser import ApacheParser
-from loglens.parsers.factory import ParserFactory


 class TestJSONParser:
@@ -29,7 +28,7 @@ class TestJSONParser:
         parser = JSONParser()

         assert parser.can_parse('{"key": "value"}')
-        assert parser.can_parse('[1, 2, 3]')
+        assert parser.can_parse("[1, 2, 3]")
         assert not parser.can_parse("not json")
         assert not parser.can_parse("")
@@ -120,7 +119,7 @@ class TestApacheParser:
     def test_error_log(self):
         """Test parsing Apache error log."""
         parser = ApacheParser()
-        line = '[Sat Jan 15 10:30:00.123456 2024] [mpm_prefork:notice] [pid 1234] AH00163: Apache configured'
+        line = "[Sat Jan 15 10:30:00.123456 2024] [mpm_prefork:notice] [pid 1234] AH00163: Apache configured"

         entry = parser.parse(line, 1)