fix: add analyzer modules
Some checks failed
CI / test (push) Failing after 13s
CI / build (push) Has been skipped

2026-02-01 08:19:59 +00:00
parent 9b37df283b
commit 78a7883630


@@ -1,77 +1,67 @@
 from collections import defaultdict
-from typing import Dict
+from dataclasses import dataclass
+from typing import Dict, Optional
 
 from src.analyzers.git_repository import GitRepository
-from src.models import VelocityAnalysis
+from src.models.data_structures import VelocityAnalysis
 
 
+@dataclass
 class VelocityAnalyzer:
-    """Analyze development velocity and productivity."""
+    """Analyzes team velocity."""
 
-    def __init__(
-        self,
-        repo: GitRepository,
-        days: int = 30,
-    ) -> None:
-        """Initialize VelocityAnalyzer."""
-        self.repo = repo
-        self.days = days
+    repo: GitRepository
+    days: int
 
-    def analyze(self) -> VelocityAnalysis:
-        """Analyze velocity metrics."""
-        commits = self.repo.get_commits(since_days=self.days)
+    def analyze(self) -> Optional[VelocityAnalysis]:
+        """Analyze team velocity."""
+        commits = self.repo.get_commits()
         if not commits:
-            return VelocityAnalysis()
+            return None
 
         total_commits = len(commits)
-        commits_per_day = total_commits / max(self.days, 1)
-        commits_per_week = total_commits / max(self.days / 7, 1)
-        authors = self.repo.get_authors()
-        authors.sort(key=lambda a: a.commit_count, reverse=True)
-        top_contributors = authors[:10]
-        daily_totals = defaultdict(int)
-        weekly_totals = defaultdict(int)
+        commits_per_day = len(commits) / max(1, self.days)
+        commits_per_week = commits_per_day * 7
+        commits_per_month = commits_per_day * 30
 
-        day_counts = defaultdict(int)
-        hour_counts = defaultdict(int)
+        commits_by_day: Dict[str, int] = {}
+        commits_by_hour: Dict[str, int] = {}
         for commit in commits:
-            day_key = commit.author_datetime.strftime("%Y-%m-%d")
-            week_key = commit.author_datetime.strftime("%Y-W%W")
-            day_name = commit.author_datetime.strftime("%A")
-            hour = commit.author_datetime.strftime("%H:00")
-            daily_totals[day_key] += 1
-            weekly_totals[week_key] += 1
-            day_counts[day_name] += 1
-            hour_counts[hour] += 1
+            day_key = commit.timestamp.strftime("%A")
+            hour_key = commit.timestamp.strftime("%H:00")
+            commits_by_day[day_key] = commits_by_day.get(day_key, 0) + 1
+            commits_by_hour[hour_key] = commits_by_hour.get(hour_key, 0) + 1
 
-        most_active_day = max(day_counts, key=day_counts.get) if day_counts else ""
-        most_active_hour = max(hour_counts, key=hour_counts.get) if hour_counts else ""
+        most_active_day = max(commits_by_day, key=commits_by_day.get) if commits_by_day else "N/A"
+        most_active_hour = max(commits_by_hour, key=commits_by_hour.get) if commits_by_hour else "N/A"
 
-        if len(weekly_totals) >= 2:
-            weeks = sorted(weekly_totals.keys())
-            recent_weeks = weeks[-2:]
-            if len(recent_weeks) == 2:
-                current = weekly_totals[recent_weeks[1]]
-                previous = weekly_totals[recent_weeks[0]]
-                if current > previous:
-                    velocity_trend = "increasing"
-                elif current < previous:
-                    velocity_trend = "decreasing"
-                else:
-                    velocity_trend = "stable"
-            else:
-                velocity_trend = "stable"
+        if len(commits) >= 2:
+            recent_commits = commits[:10]
+            older_commits = commits[10:20]
+            recent_avg = sum(1 for _ in recent_commits) / max(1, len(recent_commits))
+            older_avg = sum(1 for _ in older_commits) / max(1, len(older_commits))
+            if recent_avg > older_avg * 1.1:
+                velocity_trend = "increasing"
+            elif recent_avg < older_avg * 0.9:
+                velocity_trend = "decreasing"
+            else:
+                velocity_trend = "stable"
         else:
            velocity_trend = "stable"
 
         return VelocityAnalysis(
             total_commits=total_commits,
-            commits_per_day=round(commits_per_day, 2),
-            commits_per_week=round(commits_per_week, 2),
+            commits_per_day=commits_per_day,
+            commits_per_week=commits_per_week,
+            commits_per_month=commits_per_month,
             velocity_trend=velocity_trend,
             top_contributors=top_contributors,
             most_active_day=most_active_day,
             most_active_hour=most_active_hour,
             weekly_totals=dict(weekly_totals),
             daily_totals=dict(daily_totals),
         )
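
For reference, the day/hour bucketing added in this diff can be exercised on its own. The snippet below is a minimal, self-contained sketch: plain datetime objects stand in for the repository's commit objects (whose real type is not shown here), and only the commit.timestamp access pattern from analyze() is assumed. Commits are grouped by weekday name ("%A") and hour bucket ("%H:00"), and the busiest bucket is picked with max(..., key=dict.get).

# Standalone sketch of the new bucketing logic; the timestamps are illustrative.
from datetime import datetime
from typing import Dict

timestamps = [
    datetime(2026, 1, 26, 9, 15),   # Monday, 09:xx
    datetime(2026, 1, 26, 14, 5),   # Monday, 14:xx
    datetime(2026, 1, 27, 9, 40),   # Tuesday, 09:xx
]

commits_by_day: Dict[str, int] = {}
commits_by_hour: Dict[str, int] = {}
for ts in timestamps:
    day_key = ts.strftime("%A")      # weekday name, e.g. "Monday"
    hour_key = ts.strftime("%H:00")  # hour bucket, e.g. "09:00"
    commits_by_day[day_key] = commits_by_day.get(day_key, 0) + 1
    commits_by_hour[hour_key] = commits_by_hour.get(hour_key, 0) + 1

print(max(commits_by_day, key=commits_by_day.get))    # Monday
print(max(commits_by_hour, key=commits_by_hour.get))  # 09:00

With the @dataclass form, callers presumably construct the analyzer as VelocityAnalyzer(repo=repo, days=30) and handle the None result for an empty history, rather than relying on the removed hand-written __init__ and its days=30 default.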