Fix test files to match implementation
Some checks failed
CI / test (push) Failing after 10s

This commit is contained in:
2026-02-04 11:42:02 +00:00
parent 3f1771eccd
commit 50695da132

View File

@@ -1,7 +1,6 @@
"""Tests for Ollama client module."""
import pytest
from unittest.mock import Mock, patch
from unittest.mock import patch, MagicMock
from shellgenius.ollama_client import OllamaClient, get_ollama_client
@@ -9,10 +8,13 @@ from shellgenius.ollama_client import OllamaClient, get_ollama_client
class TestOllamaClient:
def test_init(self):
    """Test that OllamaClient reads host/model from the config mapping."""
    # The implementation accesses config via dict-style .get(key, default),
    # so the mock serves values from a fixed mapping rather than attributes.
    mock_config = MagicMock()
    mock_config.get.side_effect = lambda key, default=None: {
        "ollama_host": "localhost:11434",
        "ollama_model": "codellama",
    }.get(key, default)
    with patch('shellgenius.ollama_client.get_config', return_value=mock_config):
        client = OllamaClient()
        assert client.host == "localhost:11434"
@@ -20,21 +22,27 @@ class TestOllamaClient:
def test_is_available(self):
    """Test that is_available() is True when the configured model is listed."""
    # Config mock mirrors the implementation's dict-style .get() access.
    mock_config = MagicMock()
    mock_config.get.side_effect = lambda key, default=None: {
        "ollama_host": "localhost:11434",
        "ollama_model": "codellama",
    }.get(key, default)
    with patch('shellgenius.ollama_client.get_config', return_value=mock_config):
        client = OllamaClient()
        # Availability is derived from list_models(); stub it so no network
        # call is made, and compare identity per PEP 8 (`is True`, not `== True`).
        with patch.object(client, 'list_models', return_value=["codellama"]):
            assert client.is_available() is True
def test_list_models(self):
"""Test listing models."""
with patch('shellgenius.ollama_client.get_config') as mock_config:
mock_config.return_value.ollama_host = "localhost:11434"
mock_config.return_value.ollama_model = "codellama"
mock_config = MagicMock()
mock_config.get.side_effect = lambda key, default=None: {
"ollama_host": "localhost:11434",
"ollama_model": "codellama",
}.get(key, default)
with patch('shellgenius.ollama_client.get_config', return_value=mock_config):
client = OllamaClient()
mock_response = {"models": [{"name": "codellama"}, {"name": "llama2"}]}
@@ -47,10 +55,13 @@ class TestOllamaClient:
def test_generate(self):
    """Test that generate() returns success and the backend's response text."""
    # Config mock mirrors the implementation's dict-style .get() access.
    mock_config = MagicMock()
    mock_config.get.side_effect = lambda key, default=None: {
        "ollama_host": "localhost:11434",
        "ollama_model": "codellama",
    }.get(key, default)
    with patch('shellgenius.ollama_client.get_config', return_value=mock_config):
        client = OllamaClient()
        mock_response = {"response": "Generated text"}
        # Stub the underlying ollama client's generate() so no server is needed.
        with patch.object(client.client, 'generate', return_value=mock_response):
            result = client.generate("test prompt")
            # Identity comparison per PEP 8 (`is True`, not `== True`).
            assert result["success"] is True
            assert "Generated text" in str(result["response"])
@@ -66,8 +77,7 @@ class TestGetOllamaClient:
def test_convenience_function(self):
"""Test the convenience function for getting client."""
with patch('shellgenius.ollama_client.get_config') as mock_config:
mock_config.return_value.ollama_host = "localhost:11434"
mock_config.return_value.ollama_model = "custom-model"
mock_config.return_value = {}
client = get_ollama_client(host="custom:9999", model="custom-model")