Add CLI and services modules
Some checks failed
CI / test (push) Has been cancelled
CI / lint (push) Has been cancelled
CI / typecheck (push) Has been cancelled

This commit is contained in:
2026-01-30 18:58:57 +00:00
parent a7f1bbb388
commit fb141a4cb8

View File

@@ -0,0 +1,132 @@
"""Ollama service for CodeXchange CLI."""
import ollama as ollama_client
from typing import List, Optional
from codexchange.config import get_config
from codexchange.models import ModelInfo
class OllamaService:
    """Service for interacting with the Ollama API."""

    def __init__(self, host: Optional[str] = None, timeout: int = 300):
        """Initialize the Ollama service.

        Args:
            host: Ollama host URL. If not provided, uses config.
            timeout: Request timeout in seconds (applied to the HTTP client).
        """
        config = get_config()
        self.host = host or config.ollama_host
        self.timeout = timeout
        # Client is created lazily on first use (see `client` property).
        self._client = None

    @property
    def client(self):
        """Get or create the Ollama client (lazy, cached)."""
        if self._client is None:
            # FIX: the timeout must be configured on the HTTP client itself.
            # The original passed it in `generate(options={...})`, where it
            # is not a valid model option and was silently ignored.
            self._client = ollama_client.Client(host=self.host, timeout=self.timeout)
        return self._client

    def test_connection(self) -> bool:
        """Test connection to Ollama.

        Returns:
            True if connection successful, False otherwise.
        """
        try:
            self.client.ps()
            return True
        except Exception:
            # Best-effort probe: any failure (network, refused, ...) means
            # "not connected" — callers get a bool, never an exception.
            return False

    def list_models(self) -> List[ModelInfo]:
        """List available Ollama models.

        Returns:
            List of ModelInfo objects.

        Raises:
            ConnectionError: If the Ollama API cannot be reached.
        """
        try:
            # FIX: `ps()` reports only *running* models; `list()` reports all
            # locally available models, which is what this method's docstring
            # (and the module-level `list_models` wrapper) promises.
            response = self.client.list()
            models = []
            for model in response.get("models", []):
                models.append(ModelInfo(
                    name=model.get("name", ""),
                    size=model.get("size"),
                    modified_at=model.get("modified_at")
                ))
            return models
        except Exception as e:
            # Chain the original error for easier debugging.
            raise ConnectionError(f"Failed to list models: {e}") from e

    def generate(
        self,
        prompt: str,
        model: str,
        stream: bool = False
    ) -> str:
        """Generate a response from Ollama.

        Args:
            prompt: Prompt to send to the model.
            model: Model name to use.
            stream: Whether to stream the response.

        Returns:
            Generated response text (streamed chunks are concatenated).

        Raises:
            ConnectionError: If the request fails for any reason.
        """
        try:
            # Timeout is handled by the client (see `client` property), not
            # via `options` — "timeout" is not a recognized model option.
            response = self.client.generate(
                model=model,
                prompt=prompt,
                stream=stream,
            )
            if stream:
                # Streaming returns an iterator of chunk dicts; join the
                # incremental "response" fragments into one string.
                return "".join(
                    chunk["response"]
                    for chunk in response
                    if "response" in chunk
                )
            return response.get("response", "")
        except Exception as e:
            raise ConnectionError(f"Ollama request failed: {e}") from e
def connect(host: Optional[str] = None, timeout: int = 300) -> OllamaService:
    """Create and test connection to Ollama.

    Args:
        host: Ollama host URL.
        timeout: Request timeout.

    Returns:
        Connected OllamaService instance.

    Raises:
        ConnectionError: If the Ollama server cannot be reached.
    """
    service = OllamaService(host=host, timeout=timeout)
    # Happy path first: hand back the service as soon as the probe succeeds.
    if service.test_connection():
        return service
    raise ConnectionError(
        f"Could not connect to Ollama at {service.host}. "
        "Make sure Ollama is running."
    )
def list_models(host: Optional[str] = None, timeout: int = 300) -> List[ModelInfo]:
    """List available Ollama models.

    Args:
        host: Ollama host URL.
        timeout: Request timeout.

    Returns:
        List of available models.

    Raises:
        ConnectionError: If the Ollama server cannot be reached.
    """
    # connect() raises ConnectionError on an unreachable server, so by the
    # time list_models() runs we hold a verified service instance.
    return connect(host=host, timeout=timeout).list_models()