Add CLI commands and Gitea Actions workflow
local_code_assistant/cli.py (new file, 173 lines)
@@ -0,0 +1,173 @@
"""Local Code Assistant - CLI for local AI code assistance."""

import click
from dotenv import load_dotenv
from rich.console import Console
from rich.panel import Panel
from rich.text import Text

from local_code_assistant import __version__
from local_code_assistant.commands.explain import explain_cmd
from local_code_assistant.commands.generate import generate_cmd
from local_code_assistant.commands.refactor import optimize_cmd, refactor_cmd
from local_code_assistant.commands.repl import repl_cmd
from local_code_assistant.commands.test import test_cmd
from local_code_assistant.prompts.templates import LanguageConfig
from local_code_assistant.services.config import ConfigService
from local_code_assistant.services.ollama import OllamaService

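# Module-level setup: one shared Rich console for all command output, and
# python-dotenv so settings in a local .env file become visible via os.environ.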
console = Console()

load_dotenv()


def get_config() -> ConfigService:
    """Get the configuration service instance."""
    return ConfigService()


def get_ollama_service(config: ConfigService | None = None) -> OllamaService:
    """Get the Ollama service instance."""
    if config is None:
        config = get_config()
    return OllamaService(config)


def print_welcome():
    """Print welcome message."""
    welcome_text = Text()
    welcome_text.append("Local Code Assistant\n", style="bold cyan")
    welcome_text.append("AI-powered code assistance without leaving your machine.\n", style="dim")
    welcome_text.append("\nPowered by local LLM models via Ollama.", style="italic")

    console.print(Panel(welcome_text, title="Welcome", subtitle="Run --help for commands"))


@click.group()
@click.option("--verbose", "-v", is_flag=True, help="Enable verbose output")
@click.pass_context
def main(ctx: click.Context, verbose: bool):
    """Local Code Assistant - Run AI code assistance locally without external APIs.

    This tool provides AI-powered code assistance using local LLM models through
    Ollama. All processing happens locally - your code never leaves your machine.

    Commands:
      generate  Generate code from natural language prompts
      explain   Explain and document code
      refactor  Refactor code for better structure
      optimize  Optimize code for performance
      test      Generate unit tests
      repl      Enter interactive REPL mode
      models    List available Ollama models
      status    Check connection and model status
      version   Show version information
    """
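    # Stash shared state on Click's context object so subcommands can pull the
    # same config and Ollama service instances back out of ctx.obj.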
    ctx.ensure_object(dict)
    ctx.obj["verbose"] = verbose

    config = get_config()
    ctx.obj["config"] = config

    ollama = get_ollama_service(config)
    ctx.obj["ollama_service"] = ollama

    if verbose:
        console.print(f"[dim]Config path: {config.config_path}[/dim]")
        console.print(f"[dim]Ollama URL: {config.ollama_base_url}[/dim]")
        console.print(f"[dim]Model: {config.ollama_model}[/dim]")


@main.command()
def version():
    """Show version information."""
    version_text = Text()
    version_text.append(f"Local Code Assistant v{__version__}\n", style="bold green")
    version_text.append("Python CLI tool for local AI code assistance", style="dim")
    version_text.append("\n\nSupported languages:", style="bold")
    for lang in LanguageConfig.get_supported_languages():
        version_text.append(f"\n - {lang}", style="cyan")

    console.print(Panel(version_text, title="Version"))


@main.command()
def status():
    """Check connection status and model availability."""
    status_text = Text()
    status_text.append(f"Local Code Assistant v{__version__}\n", style="bold green")

    config = get_config()
    ollama = get_ollama_service(config)

    status_text.append(f"Ollama URL: {config.ollama_base_url}\n", style="cyan")
    status_text.append(f"Default Model: {config.ollama_model}\n", style="cyan")
    status_text.append(f"Streaming: {config.streaming}\n", style="cyan")

    try:
        if ollama.check_connection():
            status_text.append("\nOllama Connection: ", style="bold")
            status_text.append("OK\n", style="green")

            models = ollama.list_models()
            if models:
                status_text.append("\nAvailable Models:\n", style="bold")
                for model in models:
                    status_text.append(f" - {model}\n", style="yellow")
            else:
                status_text.append("\nAvailable Models: None found\n", style="red")
                status_text.append("Run 'ollama pull <model>' to download models\n", style="dim")
        else:
            status_text.append("\nOllama Connection: ", style="bold")
            status_text.append("FAILED\n", style="red")
            status_text.append("Make sure Ollama is running:\n", style="dim")
            status_text.append(" 1. Install Ollama from https://ollama.com\n", style="dim")
            status_text.append(" 2. Run 'ollama serve'\n", style="dim")
            status_text.append(" 3. Pull a model: 'ollama pull codellama'\n", style="dim")
    except Exception as e:
        status_text.append(f"\nError: {str(e)}\n", style="red")

    console.print(Panel(status_text, title="Status"))


@main.command()
def models():
    """List available Ollama models."""
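    # Imported locally rather than at module top: only this command catches
    # OllamaConnectionError explicitly.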
    from local_code_assistant.services.ollama import OllamaConnectionError

    config = get_config()
    ollama = get_ollama_service(config)

    models_text = Text()
    models_text.append("Available Ollama Models\n\n", style="bold green")

    try:
        if not ollama.check_connection():
            console.print("[red]Cannot connect to Ollama. Make sure it's running.[/red]")
            return

        models = ollama.list_models()
        if models:
            for model in models:
                models_text.append(f" - {model}\n", style="cyan")
        else:
            models_text.append("No models found.\n", style="yellow")
            models_text.append("Pull a model with: ollama pull <model>\n", style="dim")
    except OllamaConnectionError:
        console.print("[red]Cannot connect to Ollama. Make sure it's running.[/red]")
    except Exception as e:
        models_text.append(f"Error: {str(e)}\n", style="red")

    console.print(Panel(models_text, title="Models"))


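# Subcommands implemented in local_code_assistant.commands are registered here.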
main.add_command(generate_cmd, "generate")
main.add_command(explain_cmd, "explain")
main.add_command(refactor_cmd, "refactor")
main.add_command(optimize_cmd, "optimize")
main.add_command(test_cmd, "test")
main.add_command(repl_cmd, "repl")


if __name__ == "__main__":
    main()
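
A quick way to smoke-test these commands without installing the console script is Click's built-in test runner. A minimal sketch, assuming the package above is importable:

from click.testing import CliRunner

from local_code_assistant.cli import main

# Invoke the "status" subcommand in-process and capture its output.
runner = CliRunner()
result = runner.invoke(main, ["status"])
print(result.output)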