From 82ed4848413d03065514315ecdbb1af12f2e0e3b Mon Sep 17 00:00:00 2001
From: 7000pctAUTO
Date: Thu, 5 Feb 2026 06:48:06 +0000
Subject: [PATCH] fix: resolve CI/CD type errors and workflow issues

---
 src/llm/ollama.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/llm/ollama.py b/src/llm/ollama.py
index 2fcbee6..04acbc8 100644
--- a/src/llm/ollama.py
+++ b/src/llm/ollama.py
@@ -94,7 +94,7 @@ class OllamaProvider(LLMProvider):
         except Exception as e:
             raise RuntimeError(f"Ollama async generation failed: {e}") from None
 
-    def stream_generate(self, prompt: str, **kwargs) -> AsyncIterator[str]:
+    async def stream_generate(self, prompt: str, **kwargs) -> AsyncIterator[str]:
         try:
             max_tokens = kwargs.get("max_tokens", 2048)
             temperature = kwargs.get("temperature", 0.3)