Fix CI: resolve linting errors and test failures
Some checks failed
CI / test (push) Has been cancelled
Some checks failed
CI / test (push) Has been cancelled
This commit is contained in:
@@ -1,9 +1,5 @@
|
|||||||
"""Ollama client wrapper for LLM interactions."""
|
"""Ollama client wrapper for LLM interactions."""
|
||||||
import ollama as ollama_lib
|
import ollama as ollama_lib
|
||||||
from typing import TYPE_CHECKING
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
from ollama import ChatResponse, ListResponse
|
|
||||||
|
|
||||||
|
|
||||||
class OllamaClient:
|
class OllamaClient:
|
||||||
@@ -113,7 +109,7 @@ class OllamaClient:
|
|||||||
List of available models with their details.
|
List of available models with their details.
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
response = ollama_lib.list()
|
response = ollama_lib.list()
|
||||||
return response.get("models", [])
|
return response.get("models", [])
|
||||||
except Exception:
|
except Exception:
|
||||||
return []
|
return []
|
||||||
@@ -139,7 +135,7 @@ def get_ollama_client(host: str = "http://localhost:11434", model: str = "llama3
|
|||||||
|
|
||||||
Args:
|
Args:
|
||||||
host: Ollama server URL.
|
host: Ollama server URL.
|
||||||
model: Default model to use.
|
model: Default model to use.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
OllamaClient instance.
|
OllamaClient instance.
|
||||||
|
|||||||
Reference in New Issue
Block a user