Initial upload: Local LLM Prompt Manager CLI tool
Some checks failed
CI / test (push) Has been cancelled
CI / lint (push) Has been cancelled
CI / build (push) Has been cancelled

This commit is contained in:
2026-02-05 20:56:11 +00:00
parent cc5d6d2323
commit f83915e0d2

43
src/commands/__init__.py Normal file
View File

@@ -0,0 +1,43 @@
"""CLI command utilities and base classes."""
class CommandError(Exception):
"""Base exception for command errors."""
def __init__(self, message: str, suggestion: str = None):
super().__init__(message)
self.suggestion = suggestion
class PromptNotFoundError(CommandError):
    """Raised when a requested prompt does not exist."""

    def __init__(self, prompt_name: str):
        msg = f"Prompt '{prompt_name}' not found"
        hint = (
            "Use 'llm-prompt list' to see available prompts "
            "or check the prompt name spelling"
        )
        super().__init__(msg, hint)
        # Remember which prompt was requested so callers can inspect it.
        self.prompt_name = prompt_name
class TagNotFoundError(CommandError):
    """Raised when a requested tag does not exist."""

    def __init__(self, tag_name: str):
        msg = f"Tag '{tag_name}' not found"
        hint = "Use 'llm-prompt tag list' to see available tags"
        super().__init__(msg, hint)
        # Remember which tag was requested so callers can inspect it.
        self.tag_name = tag_name
class ConnectionError(CommandError):
    """Raised when connecting to an LLM provider fails.

    NOTE(review): this name shadows the builtin ``ConnectionError``;
    any code in this module that tries to catch the builtin will catch
    this class instead. Consider renaming (e.g. ``LLMConnectionError``)
    in a follow-up, since renaming now would break existing callers.
    """

    def __init__(self, provider: str, url: str):
        msg = f"Failed to connect to {provider} at {url}"
        hint = "Ensure the LLM service is running and check the API URL in config"
        super().__init__(msg, hint)
        # Keep provider/url so error handlers can report where the failure was.
        self.provider = provider
        self.url = url