"""Tests for Local Code Assistant services."""
|
|
|
|
import pytest
|
|
from unittest.mock import Mock, patch, MagicMock
|
|
|
|
from local_code_assistant.services.ollama import (
|
|
OllamaService,
|
|
OllamaServiceError,
|
|
OllamaConnectionError,
|
|
OllamaModelError
|
|
)
|
|
from local_code_assistant.services.config import ConfigService
|
|
|
|
|
|
class TestOllamaService:
    """Tests for the Ollama service."""

    @staticmethod
    def _json_response(payload):
        """Build a mock requests response whose ``.json()`` returns *payload*.

        Centralizes the three-line response boilerplate that the individual
        tests previously duplicated (Mock + json.return_value +
        raise_for_status).
        """
        response = Mock()
        response.json.return_value = payload
        response.raise_for_status = Mock()
        return response

    @pytest.fixture
    def mock_config(self):
        """Create a mock configuration restricted to ConfigService's API."""
        config = Mock(spec=ConfigService)
        config.ollama_base_url = "http://localhost:11434"
        config.ollama_model = "codellama"
        config.ollama_timeout = 30
        config.streaming = True
        return config

    @pytest.fixture
    def ollama_service(self, mock_config):
        """Create an Ollama service wired to the mock config."""
        return OllamaService(mock_config)

    def test_init(self, ollama_service, mock_config):
        """Initialization stores the base URL and the config object."""
        assert ollama_service.base_url == mock_config.ollama_base_url
        assert ollama_service.config == mock_config

    @patch('local_code_assistant.services.ollama.requests.get')
    def test_check_connection_success(self, mock_get, ollama_service):
        """check_connection() returns True when the API answers normally."""
        mock_get.return_value = self._json_response({"models": []})

        result = ollama_service.check_connection()
        assert result is True

    @patch('local_code_assistant.services.ollama.requests.get')
    def test_check_connection_failure(self, mock_get, ollama_service):
        """check_connection() returns False when the server is unreachable."""
        # Imported locally so the test module loads even if requests is
        # only an indirect dependency of the service under test.
        import requests
        mock_get.side_effect = requests.exceptions.ConnectionError()

        result = ollama_service.check_connection()
        assert result is False

    @patch('local_code_assistant.services.ollama.requests.get')
    def test_list_models(self, mock_get, ollama_service):
        """list_models() extracts the model names from the API payload."""
        mock_get.return_value = self._json_response({
            "models": [
                {"name": "codellama:latest"},
                {"name": "llama3:latest"}
            ]
        })

        models = ollama_service.list_models()
        assert models == ["codellama:latest", "llama3:latest"]

    @patch('local_code_assistant.services.ollama.requests.post')
    def test_generate(self, mock_post, ollama_service):
        """generate() returns the 'response' field of the API payload."""
        mock_post.return_value = self._json_response({
            "response": "def hello(): pass"
        })

        result = ollama_service.generate("Say hello")
        assert result == "def hello(): pass"

    @patch('local_code_assistant.services.ollama.requests.post')
    def test_generate_with_model(self, mock_post, ollama_service):
        """generate() accepts an explicit model override."""
        mock_post.return_value = self._json_response({
            "response": "print('hello')"
        })

        result = ollama_service.generate("Say hello", model="llama3")
        assert result == "print('hello')"

    def test_connection_error(self, ollama_service):
        """OllamaConnectionError propagates its message to str()."""
        # NOTE(review): the original body raised the exception itself inside
        # pytest.raises, so the test could never fail.  The ``match=`` check
        # at least verifies the message survives construction; a stronger
        # test would drive a real service call that raises this error.
        with pytest.raises(OllamaConnectionError, match="Connection failed"):
            raise OllamaConnectionError("Connection failed")
|
|
|
|
|
|
class TestConfigService:
    """Tests for the configuration service."""

    @pytest.fixture
    def temp_config_file(self, tmp_path):
        """Write a throwaway YAML config and return its path as a string."""
        yaml_body = """
ollama:
  base_url: http://localhost:11434
  model: test-model
  timeout: 60
"""
        cfg_path = tmp_path / "config.yaml"
        cfg_path.write_text(yaml_body)
        return str(cfg_path)

    def test_default_config(self):
        """Built-in defaults apply when no env vars and no file are present."""
        with patch('os.getenv', return_value=None):
            svc = ConfigService("/nonexistent/path")
            assert svc.ollama_base_url == "http://localhost:11434"
            assert svc.ollama_model == "codellama"
            # NOTE(review): 8000 is inconsistent with the 30/60 second values
            # used elsewhere in this file — confirm the default's unit
            # (milliseconds vs seconds) against ConfigService.
            assert svc.ollama_timeout == 8000

    def test_env_override(self):
        """Environment variables take precedence over built-in defaults."""
        with patch.dict('os.environ', {'OLLAMA_MODEL': 'env-model'}):
            svc = ConfigService()
            assert svc.ollama_model == "env-model"

    def test_properties(self, temp_config_file):
        """Values read from a YAML file are exposed through properties."""
        svc = ConfigService(temp_config_file)
        assert svc.ollama_model == "test-model"
        assert svc.ollama_timeout == 60