Initial upload of auto-changelog-generator

2026-01-29 12:00:30 +00:00
parent 9f948a803a
commit 7d6ae903cc


@@ -0,0 +1,144 @@
from dataclasses import dataclass, field
from typing import Optional

import requests


@dataclass
class CategorizedChange:
    """Represents a single categorized change."""
    type: str
    scope: Optional[str]
    description: str
    file_path: str
    breaking_change: bool = False
    breaking_description: Optional[str] = None
    confidence: float = 0.0


@dataclass
class LLMResponse:
    """Response from the LLM for change categorization."""
    changes: list[CategorizedChange]
    summary: str
    version: str = "1.0"
    breaking_changes: list[str] = field(default_factory=list)
    contributors: list[str] = field(default_factory=list)


@dataclass
class Config:
    """Configuration for the LLM client."""
    base_url: str = "http://localhost:11434"
    model: str = "llama3.2"
    temperature: float = 0.3
    max_tokens: int = 2000
    timeout: int = 120

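
# Illustrative only (not part of the original commit): the defaults above assume a
# stock local Ollama install; other setups can override fields explicitly, e.g.
#   Config(base_url="http://127.0.0.1:11434", model="mistral", temperature=0.2)
# (the model name here is just an example, not one the project requires).
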
class OllamaAPIClient:
    """Client for interacting with Ollama or LM Studio local API."""

    def __init__(self, config: Optional[Config] = None):
        self.config = config or Config()
        self.base_url = self.config.base_url
        self.model = self.config.model

    def _make_request(self, endpoint: str, payload: dict) -> dict:
        """Make HTTP request to LLM API."""
        url = f"{self.base_url}/{endpoint}"
        response = requests.post(
            url,
            json=payload,
            timeout=self.config.timeout,
        )
        response.raise_for_status()
        return response.json()

    def generate(self, prompt: str) -> str:
        """Generate response from LLM."""
        # stream=False returns the full reply in a single JSON object;
        # num_predict is Ollama's cap on the number of generated tokens.
        payload = {
            "model": self.model,
            "prompt": prompt,
            "stream": False,
            "options": {
                "temperature": self.config.temperature,
                "num_predict": self.config.max_tokens,
            },
        }
        result = self._make_request("api/generate", payload)
        return result.get("response", "")

    def chat(self, messages: list[dict]) -> str:
        """Send chat messages to LLM."""
        payload = {
            "model": self.model,
            "messages": messages,
            "stream": False,
            "options": {
                "temperature": self.config.temperature,
                "num_predict": self.config.max_tokens,
            },
        }
        result = self._make_request("api/chat", payload)
        return result.get("message", {}).get("content", "")

    def is_available(self) -> bool:
        """Check if LLM API is available."""
        try:
            # /api/tags lists the locally installed models; a 200 means the server is up.
            response = requests.get(
                f"{self.base_url}/api/tags",
                timeout=5,
            )
            return response.status_code == 200
        except Exception:
            return False

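
# --- Illustrative sketch (not part of the original commit) ---------------------
# One possible way to turn the model's JSON reply into the dataclasses above;
# it assumes the model followed the schema produced by build_categorization_prompt
# below. The function name is hypothetical.
def parse_llm_response_sketch(raw: str) -> LLMResponse:
    import json

    data = json.loads(raw)
    changes = [
        CategorizedChange(
            type=c.get("type", "chore"),
            scope=c.get("scope"),
            description=c.get("description", ""),
            file_path=c.get("file_path", ""),
            breaking_change=bool(c.get("breaking_change", False)),
            breaking_description=c.get("breaking_description"),
            confidence=float(c.get("confidence") or 0.0),
        )
        for c in data.get("changes", [])
    ]
    return LLMResponse(
        changes=changes,
        summary=data.get("summary", ""),
        version=data.get("version", "1.0"),
        breaking_changes=data.get("breaking_changes", []),
        contributors=data.get("contributors", []),
    )
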

def build_categorization_prompt(changes_text: str) -> str:
    """Build prompt for categorizing changes."""
    return f"""You are a helpful assistant that categorizes git changes according to the Conventional Commits specification.

Please analyze the following git diff changes and categorize each change into one of these types:
- feat: A new feature
- fix: A bug fix
- docs: Documentation changes
- breaking: Breaking changes (could be part of any type)
- refactor: Code refactoring without feature/fix
- style: Formatting, missing semi-colons, etc.
- test: Adding or modifying tests
- chore: Maintenance tasks, dependency updates, etc.

For each change, provide:
1. The type (one of the above)
2. An optional scope (e.g., 'auth', 'api', 'ui')
3. A concise description (imperative mood, max 100 chars)
4. Whether it's a breaking change
5. Breaking change description if applicable

Also provide a brief summary of all changes combined (2-3 sentences).

Output your response in the following JSON format:
{{
    "changes": [
        {{
            "type": "feat",
            "scope": "auth",
            "description": "add user login functionality",
            "file_path": "src/auth/login.py",
            "breaking_change": false,
            "breaking_description": null,
            "confidence": 0.9
        }}
    ],
    "summary": "This release adds user authentication, fixes a memory leak, and updates documentation.",
    "version": "1.0.0",
    "breaking_changes": [],
    "contributors": []
}}

Here are the changes to categorize:
{changes_text}

Remember to output ONLY valid JSON without any additional text or formatting."""
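

# --- Illustrative usage (not part of the original commit) ----------------------
# Minimal end-to-end sketch: assumes a local Ollama (or compatible) server is
# already running at Config.base_url with the configured model pulled. The sample
# diff text is made up for demonstration.
if __name__ == "__main__":
    client = OllamaAPIClient()
    if not client.is_available():
        raise SystemExit("LLM API is not reachable; start the local server first.")

    sample_diff = "diff --git a/src/auth/login.py b/src/auth/login.py\n+def login(user): ..."
    raw_reply = client.generate(build_categorization_prompt(sample_diff))
    print(raw_reply)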