Initial upload: Local LLM Prompt Manager CLI tool
Some checks failed
CI / lint (push) Has been cancelled
CI / build (push) Has been cancelled
CI / test (push) Has been cancelled

This commit is contained in:
2026-02-05 20:56:10 +00:00
parent 3aa0da245b
commit cc5d6d2323

39
src/llm/llm_factory.py Normal file
View File

@@ -0,0 +1,39 @@
"""LLM client factory for creating clients based on provider."""
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from .base import LLMClient
class LLMClientFactory:
    """Factory for creating LLM clients keyed by provider name."""

    @staticmethod
    def create(provider: str | None = None, url: str | None = None) -> "LLMClient":
        """Create an LLM client for the specified provider.

        Args:
            provider: Provider name, case-insensitive ("ollama" or
                "lmstudio"). Falls back to the configured default provider
                when None or empty.
            url: Base URL passed through to the client constructor; the
                client applies its own default when None.

        Returns:
            A client instance for the resolved provider.

        Raises:
            ValueError: If the resolved provider name is not recognized.
        """
        # Imports are deferred to call time, presumably to avoid circular
        # imports between this factory and the concrete client modules.
        from ..config import get_config
        from .lmstudio import LMStudioClient
        from .ollama import OllamaClient

        config = get_config()
        provider = provider or config.default_provider
        # Normalize once instead of calling .lower() in every branch.
        name = provider.lower()
        if name == "ollama":
            return OllamaClient(url)
        if name == "lmstudio":
            return LMStudioClient(url)
        raise ValueError(f"Unknown provider: {provider}")

    @staticmethod
    def get_default_client() -> "LLMClient":
        """Get the default LLM client based on configuration.

        Returns:
            A client for the configured default provider.
        """
        # create() already falls back to config.default_provider when no
        # provider is given, so reading the config here would duplicate
        # that logic and risk the two paths drifting apart.
        return LLMClientFactory.create()

    @staticmethod
    def list_providers() -> list[str]:
        """List the provider names this factory can construct.

        Returns:
            The supported provider identifiers, in no particular order.
        """
        return ["ollama", "lmstudio"]