fix: resolve CI build failures
This commit is contained in:
@@ -1,114 +1,16 @@
|
||||
"""Configuration management for Git Commit AI."""
|
||||
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Any, Optional
|
||||
|
||||
import yaml
|
||||
|
||||
|
||||
class Config:
    """Configuration manager that loads from YAML and supports env overrides.

    Values are looked up by dotted key (e.g. ``"ollama.model"``); an
    environment variable of the same name with dots replaced by underscores
    and upper-cased (``OLLAMA_MODEL``) always takes precedence over the file.
    """

    def __init__(self, config_path: Optional[str] = None):
        """Bind to *config_path* and eagerly load it.

        When *config_path* is None, falls back to the ``CONFIG_PATH``
        environment variable, then to ``.git-commit-ai/config.yaml``.
        """
        if config_path is None:
            config_path = os.environ.get("CONFIG_PATH", str(Path(".git-commit-ai") / "config.yaml"))
        self.config_path = Path(config_path)
        self._config: dict[str, Any] = {}
        self._load_config()

    def _load_config(self) -> None:
        """Parse the YAML file into ``self._config``.

        A missing file or a YAML parse error (reported as a warning on
        stdout) both leave the config as an empty dict.
        """
        if self.config_path.exists():
            try:
                # Explicit UTF-8: without encoding= open() uses the locale
                # encoding, which mis-decodes UTF-8 config files on systems
                # whose default is e.g. cp1252.
                with open(self.config_path, 'r', encoding='utf-8') as f:
                    # safe_load returns None for an empty file; coerce to {}.
                    self._config = yaml.safe_load(f) or {}
            except yaml.YAMLError as e:
                print(f"Warning: Failed to parse config file: {e}")
                self._config = {}
        else:
            self._config = {}

    def get(self, key: str, default: Any = None) -> Any:
        """Return the value for dotted *key*, or *default* when absent.

        The environment variable ``KEY`` (dots -> underscores, upper-cased)
        overrides the file; env values are coerced via _parse_env_value.
        """
        env_key = key.upper().replace(".", "_")
        env_value = os.environ.get(env_key)
        if env_value is not None:
            return self._parse_env_value(env_value)

        # Walk the nested dict one dotted segment at a time.
        keys = key.split(".")
        value = self._config
        for k in keys:
            if isinstance(value, dict):
                value = value.get(k)
            else:
                # Intermediate node is not a mapping -> key can't resolve.
                return default
        if value is None:
            return default
        return value

    def _parse_env_value(self, value: str) -> Any:
        """Coerce an env-var string to bool, int, or float; else keep str."""
        if value.lower() in ("true", "false"):
            return value.lower() == "true"
        try:
            return int(value)
        except ValueError:
            pass
        try:
            return float(value)
        except ValueError:
            pass
        return value

    @property
    def ollama_model(self) -> str:
        """Ollama model name used for generation."""
        return self.get("ollama.model", "qwen2.5-coder:3b")

    @property
    def ollama_base_url(self) -> str:
        """Base URL of the Ollama HTTP API."""
        return self.get("ollama.base_url", "http://localhost:11434")

    @property
    def ollama_timeout(self) -> int:
        """Request timeout for Ollama calls, in seconds."""
        return self.get("ollama.timeout", 120)

    @property
    def max_message_length(self) -> int:
        """Maximum length of a generated commit message."""
        return self.get("commit.max_length", 80)

    @property
    def num_suggestions(self) -> int:
        """How many commit-message suggestions to generate."""
        return self.get("commit.num_suggestions", 3)

    @property
    def conventional_by_default(self) -> bool:
        """Whether Conventional Commits format is used without a flag."""
        return self.get("commit.conventional_by_default", False)

    @property
    def cache_enabled(self) -> bool:
        """Whether response caching is enabled."""
        return self.get("cache.enabled", True)

    @property
    def cache_directory(self) -> str:
        """Directory where cached responses are stored."""
        return self.get("cache.directory", ".git-commit-ai/cache")

    @property
    def cache_ttl_hours(self) -> int:
        """Cache entry time-to-live, in hours."""
        return self.get("cache.ttl_hours", 24)

    @property
    def prompt_directory(self) -> str:
        """Directory containing custom prompt templates."""
        return self.get("prompts.directory", ".git-commit-ai/prompts")

    @property
    def show_diff(self) -> bool:
        """Whether the staged diff is shown alongside suggestions."""
        return self.get("output.show_diff", False)

    @property
    def interactive(self) -> bool:
        """Whether interactive selection mode is on."""
        return self.get("output.interactive", False)

    def reload(self) -> None:
        """Re-read the config file from disk."""
        self._load_config()
|
||||
|
||||
|
||||
def get_config(config_path: Optional[str] = None) -> Config:
    """Construct and return a :class:`Config`.

    *config_path* is forwarded verbatim; when None, Config applies its
    own default-path resolution.
    """
    config = Config(config_path)
    return config
|
||||
def load_config():
    """Load configuration from file.

    Checks the project-local config first, then the per-user config, and
    returns the parsed mapping from the first file that exists. Returns
    an empty dict when no file is found or the file is empty.

    NOTE(review): this duplicates Config._load_config's job without the
    env-override layer — presumably a legacy entry point; confirm callers.
    """
    config_paths = [
        '.git-commit-ai/config.yaml',
        os.path.expanduser('~/.config/git-commit-ai/config.yaml')
    ]

    for path in config_paths:
        if os.path.exists(path):
            # Explicit UTF-8: the default locale encoding is not portable
            # for a config file shared across machines.
            with open(path, encoding='utf-8') as f:
                # safe_load yields None for an empty file; coerce to {}.
                return yaml.safe_load(f) or {}

    return {}
|
||||
|
||||
Reference in New Issue
Block a user