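"""LLM-backed configuration recommendations.

Prefers a locally running Ollama server and falls back to canned,
rule-based suggestions when no server is reachable.
"""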
from abc import ABC, abstractmethod
from typing import Any, Dict, Optional

import requests


class LLMClient(ABC):
    """Interface for anything that can turn a list of config issues into advice."""

    @abstractmethod
    def get_recommendation(self, config_type: str, issues: list) -> str:
        pass


class OllamaClient(LLMClient):
|
|
def __init__(self, endpoint: str = "http://localhost:11434", model: str = "llama3"):
|
|
self.endpoint = endpoint
|
|
self.model = model
|
|
|
|
    def is_available(self) -> bool:
        """Probe the server's tag-listing endpoint; any failure counts as unavailable."""
        try:
            response = requests.get(f"{self.endpoint}/api/tags", timeout=5)
            return response.status_code == 200
        except Exception:
            return False

    def get_recommendation(self, config_type: str, issues: list) -> str:
        prompt = self._build_prompt(config_type, issues)
        try:
            response = requests.post(
                f"{self.endpoint}/api/generate",
                json={
                    "model": self.model,
                    "prompt": prompt,
                    "stream": False,
                },
                timeout=30,
            )
            if response.status_code == 200:
                result = response.json()
                return result.get("response", "No recommendation available")
        except Exception as e:
            return f"LLM unavailable: {e}. Falling back to rule-based suggestions."
        # A non-200 response without an exception falls through to here.
        return "Unable to get recommendation from LLM"

    def _build_prompt(self, config_type: str, issues: list) -> str:
        issues_text = "\n".join(f"- {i.get('message', '')}" for i in issues) if issues else "No issues found"

        prompt = f"""You are a configuration expert. Analyze the following {config_type} configuration issues and provide recommendations:

Issues found:
{issues_text}

Please provide:
1. A brief explanation of the issues
2. Specific fix recommendations with code examples
3. Best practices for this configuration type

Format your response clearly with sections."""

        return prompt


class FallbackClient(LLMClient):
    """Rule-based recommendations keyed by issue category, used when no LLM is reachable."""

    RECOMMENDATIONS = {
        "deprecated-package": "Replace deprecated packages with their modern equivalents. Check the official documentation for migration guides.",
        "outdated-version": "Consider updating dependencies to stable versions. Major version 0.x packages may have breaking changes.",
        "missing-type-checking": "Enable strict mode in TypeScript for better type safety and an improved developer experience.",
        "missing-scripts": "Add standard scripts (test, build) to improve the development workflow and CI/CD integration.",
        "security-vulnerability": "Review security settings and ensure production configurations don't expose sensitive data.",
    }

    def get_recommendation(self, config_type: str, issues: list) -> str:
        recommendations = []
        for issue in issues:
            category = issue.get("category", "")
            if category in self.RECOMMENDATIONS:
                recommendations.append(self.RECOMMENDATIONS[category])
        if recommendations:
            return "\n".join(recommendations)
        return "Review the configuration for common best practices in your project type."


class LLMProvider:
    """Lazily selects an OllamaClient when the server is reachable, else a FallbackClient."""

    def __init__(self, endpoint: str = "http://localhost:11434", model: str = "llama3"):
        self.endpoint = endpoint
        self.model = model
        self._client: Optional[LLMClient] = None

    def get_client(self) -> LLMClient:
        # The choice is cached, so availability is only probed once per provider.
        if self._client is None:
            try:
                client = OllamaClient(self.endpoint, self.model)
                if client.is_available():
                    self._client = client
                else:
                    self._client = FallbackClient()
            except Exception:
                self._client = FallbackClient()
        return self._client

    def get_recommendation(self, config_type: str, issues: list) -> str:
        client = self.get_client()
        return client.get_recommendation(config_type, issues)


def load_config() -> Dict[str, Any]:
    try:
        with open("config.yaml", "r") as f:
            # Imported lazily so PyYAML is only needed when a config file exists.
            import yaml

            return yaml.safe_load(f) or {}
    except FileNotFoundError:
        return {}

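# Expected config.yaml shape (a sketch inferred from the defaults below; both
# the file and the "llm" section are optional):
#
#   llm:
#     endpoint: "http://localhost:11434"
#     model: "llama3"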
def get_llm_config() -> Dict[str, Any]:
    config = load_config()
    return config.get("llm", {"endpoint": "http://localhost:11434", "model": "llama3"})
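

# Example usage: a minimal sketch, safe to run with or without a local Ollama
# server, since LLMProvider silently falls back to FallbackClient when the
# server is unreachable. The sample issue dict is hypothetical and only
# illustrates the expected "category"/"message" keys; "package.json" stands in
# for any config type.
if __name__ == "__main__":
    cfg = get_llm_config()
    provider = LLMProvider(
        cfg.get("endpoint", "http://localhost:11434"),
        cfg.get("model", "llama3"),
    )
    sample_issues = [
        {"category": "deprecated-package", "message": "Package 'request' is deprecated"},
    ]
    print(provider.get_recommendation("package.json", sample_issues))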