From b078de8ebb5f1d09fa72b7ae350cd673e2d63029 Mon Sep 17 00:00:00 2001
From: 7000pctAUTO
Date: Thu, 5 Feb 2026 06:34:48 +0000
Subject: [PATCH] Initial upload: Local AI Commit Reviewer CLI with CI/CD
 workflow

---
 tests/unit/test_llm.py | 54 ++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 54 insertions(+)
 create mode 100644 tests/unit/test_llm.py

diff --git a/tests/unit/test_llm.py b/tests/unit/test_llm.py
new file mode 100644
index 0000000..6d7d7e7
--- /dev/null
+++ b/tests/unit/test_llm.py
@@ -0,0 +1,54 @@
+import pytest
+from unittest.mock import MagicMock, patch
+from src.llm.provider import LLMProvider, LLMResponse, ModelInfo
+
+
+class MockLLMProvider(LLMProvider):
+    def __init__(self, available: bool = True):
+        self._available = available
+        self._models = []
+
+    def is_available(self) -> bool:
+        return self._available
+
+    def generate(self, prompt: str, **kwargs) -> LLMResponse:
+        return LLMResponse(
+            text="Mock review response",
+            model="mock-model",
+            tokens_used=100,
+            finish_reason="stop"
+        )
+
+    async def agenerate(self, prompt: str, **kwargs) -> LLMResponse:
+        return self.generate(prompt, **kwargs)
+
+    def stream_generate(self, prompt: str, **kwargs):
+        yield "Mock"
+
+    def list_models(self) -> list[ModelInfo]:
+        return self._models
+
+    def health_check(self) -> bool:
+        return self._available
+
+
+class TestLLMProvider:
+    def test_mock_provider_is_available(self):
+        provider = MockLLMProvider(available=True)
+        assert provider.is_available() is True
+
+    def test_mock_provider_not_available(self):
+        provider = MockLLMProvider(available=False)
+        assert provider.is_available() is False
+
+    def test_mock_generate(self):
+        provider = MockLLMProvider()
+        response = provider.generate("test prompt")
+        assert isinstance(response, LLMResponse)
+        assert response.text == "Mock review response"
+        assert response.model == "mock-model"
+
+    def test_mock_list_models(self):
+        provider = MockLLMProvider()
+        models = provider.list_models()
+        assert isinstance(models, list)