Add CLI commands and Gitea Actions workflow
local_code_assistant/commands/repl.py
@@ -0,0 +1,353 @@
"""Interactive REPL for Local Code Assistant."""

import os
from typing import Optional

import click
from prompt_toolkit import PromptSession
from prompt_toolkit.history import FileHistory
from prompt_toolkit.styles import Style
from rich.console import Console
from rich.panel import Panel
from rich.syntax import Syntax
from rich.text import Text

from local_code_assistant.prompts.templates import LanguageConfig, PromptTemplates
from local_code_assistant.services.ollama import OllamaService, OllamaServiceError

console = Console()


class REPL:
    """Interactive REPL for Local Code Assistant."""

    PROMPT_STYLE = Style.from_dict({
        "prompt": "ansigreen bold",
        "continuation": "ansigreen",
    })

    def __init__(self, ollama: OllamaService, config):
        """Initialize REPL.

        Args:
            ollama: Ollama service instance.
            config: Configuration service instance.
        """
        self.ollama = ollama
        self.config = config
        # Expand "~" explicitly; FileHistory does not perform tilde expansion.
        self.history_file = os.path.expanduser("~/.local-code-assistant/.history")
        self.session: Optional[PromptSession] = None
        self.message_history: list[dict[str, str]] = []
        self.current_language = config.default_language

    def _get_session(self) -> PromptSession:
        """Get or create prompt session.

        Returns:
            PromptSession instance.
        """
        if self.session is None:
            self.session = PromptSession(
                history=FileHistory(self.history_file),
                style=self.PROMPT_STYLE,
                # Multi-line editing: Enter inserts a newline; submit with
                # Esc+Enter (or Alt+Enter).
                multiline=True
            )
        return self.session

    def _format_prompt(self) -> str:
        """Format the REPL prompt.

        Returns:
            Prompt string.
        """
        return f"[{self.current_language}]>> "

    def _display_welcome(self):
        """Display welcome message."""
        welcome = Text()
        welcome.append("Local Code Assistant REPL\n", style="bold cyan")
        welcome.append("Commands:\n", style="bold")
        welcome.append(" :generate <prompt> - Generate code\n", style="dim")
        welcome.append(" :explain - Explain last code\n", style="dim")
        welcome.append(" :lang <language> - Set programming language\n", style="dim")
        welcome.append(" :model <name> - Set model\n", style="dim")
        welcome.append(" :status - Show current settings\n", style="dim")
        welcome.append(" :clear - Clear conversation\n", style="dim")
        welcome.append(" :quit / Ctrl+D - Exit REPL\n", style="dim")
        welcome.append("\nType your requests naturally.", style="italic")

        console.print(Panel(welcome, title="Welcome"))

    def _display_response(self, response: str, language: str = "text"):
        """Display model response.

        Args:
            response: Response text.
            language: Programming language for syntax highlighting.
        """
        if language != "text" and self.config.syntax_highlighting:
            syntax = Syntax(response, language, line_numbers=True)
            console.print(Panel(syntax, title="Response"))
        else:
            console.print(Panel(response, title="Response"))

    def _handle_command(self, line: str) -> bool:
        """Handle special REPL commands.

        Args:
            line: Input line.

        Returns:
            True if command was handled, False if not a command.
        """
        line = line.strip()

        if line in (":quit", ":q", "exit"):
            console.print("[yellow]Goodbye![/yellow]")
            return True

        handlers = {
            ":generate ": self._handle_generate,
            ":explain": self._handle_explain_cmd,
            ":lang ": self._handle_lang,
            ":model ": self._handle_model,
            ":status": self._handle_status,
            ":clear": self._handle_clear,
            ":help": self._handle_help,
            ":h": self._handle_help,
        }

        for prefix, handler in handlers.items():
            if line.startswith(prefix):
                handler(line)
                return True

        return False

    def _handle_generate(self, prompt: str):
        """Handle code generation request.

        Args:
            prompt: User prompt.
        """
        # Strip the command prefix when invoked as ":generate <prompt>".
        if prompt.startswith(":generate "):
            prompt = prompt[len(":generate "):].strip()

        self.message_history.append({"role": "user", "content": prompt})

        console.print("[dim]Generating...[/dim]")

        try:
            full_prompt = PromptTemplates.code_generation(
                language=self.current_language,
                user_prompt=prompt
            )

            system_prompt = PromptTemplates.build_system_prompt()

            response = self.ollama.generate(
                prompt=full_prompt,
                model=self._get_model(),
                system=system_prompt,
                temperature=self._get_temperature()
            )

            self.message_history.append({"role": "assistant", "content": response})
            self._display_response(response, self.current_language)

        except OllamaServiceError as e:
            console.print(f"[red]Error: {str(e)}[/red]")

    def _handle_explain_cmd(self, line: str):
        """Handle explain command.

        Args:
            line: Input line.
        """
        if line != ":explain":
            return

        # Explain the most recent assistant response (the last generated code).
        for msg in reversed(self.message_history):
            if msg["role"] == "assistant":
                self._handle_explain(msg["content"])
                return

        console.print("[yellow]No conversation history[/yellow]")

    def _handle_lang(self, line: str):
        """Handle language selection command.

        Args:
            line: Input line.
        """
        if not line.startswith(":lang "):
            return

        language = line[6:].strip().lower()
        if LanguageConfig.is_supported(language):
            self.current_language = language
            console.print(f"[green]Language set to {language}[/green]")
        else:
            supported = ", ".join(LanguageConfig.get_supported_languages())
            console.print(f"[red]Unsupported language. Supported: {supported}[/red]")

    def _handle_model(self, line: str):
        """Handle model selection command.

        Args:
            line: Input line.
        """
        if not line.startswith(":model "):
            return

        model = line[7:].strip()
        self.config.ollama_model = model
        console.print(f"[green]Model set to {model}[/green]")

    def _handle_status(self, line: str):
        """Handle status command.

        Args:
            line: Input line.
        """
        if line != ":status":
            return

        status = Text()
        status.append("Current Settings:\n", style="bold")
        status.append(f" Language: {self.current_language}\n")
        status.append(f" Model: {self.config.ollama_model}\n")
        status.append(f" Ollama: {self.config.ollama_base_url}")
        console.print(Panel(status, title="Status"))

    def _handle_clear(self, line: str):
        """Handle clear command.

        Args:
            line: Input line.
        """
        if line != ":clear":
            return

        self.message_history = []
        console.print("[green]Conversation cleared[/green]")

    def _handle_help(self, line: str):
        """Handle help command.

        Args:
            line: Input line.
        """
        if line not in (":help", ":h"):
            return

        self._display_welcome()

    def _handle_explain(self, code: str):
        """Handle code explanation request.

        Args:
            code: Code to explain.
        """
        console.print("[dim]Explaining...[/dim]")

        try:
            full_prompt = PromptTemplates.code_explanation(
                language=self.current_language,
                code=code
            )

            system_prompt = PromptTemplates.build_system_prompt(
                "You are explaining code to a developer."
            )

            response = self.ollama.generate(
                prompt=full_prompt,
                model=self._get_model(),
                system=system_prompt,
                temperature=0.3
            )

            self._display_response(response)

        except OllamaServiceError as e:
            console.print(f"[red]Error: {str(e)}[/red]")

    def _get_model(self) -> str:
        """Get current model."""
        return self.config.ollama_model

    def _get_temperature(self) -> float:
        """Get current temperature."""
        return self.config.temperature

    def run(self):
        """Run the REPL loop."""
        self._display_welcome()

        session = self._get_session()

        while True:
            try:
                user_input = session.prompt(self._format_prompt()).strip()

                if not user_input:
                    continue

                if self._handle_command(user_input):
                    # Only the quit commands end the loop; other commands
                    # (e.g. :status, :lang) return to the prompt.
                    if user_input in (":quit", ":q", "exit"):
                        break
                    continue

                self._handle_generate(user_input)

            except KeyboardInterrupt:
                console.print("\n[yellow]Interrupted. Type :quit to exit.[/yellow]")
            except EOFError:
                console.print("\n[yellow]Goodbye![/yellow]")
                break


@click.command()
@click.option(
    "--model", "-m",
    help="Model to use"
)
@click.option(
    "--language", "-l",
    type=str,
    default="python",
    help="Default programming language"
)
@click.pass_context
def repl_cmd(ctx: click.Context, model: Optional[str], language: str):
    """Enter interactive REPL mode for code assistance.

    Example:
        local-code-assistant repl
        local-code-assistant repl --model codellama --language python

    \f
    Args:
        ctx: Click context.
        model: Model to use.
        language: Default language.
    """
    config = ctx.obj["config"]
    if model:
        config.ollama_model = model

    ollama_service = OllamaService(config)

    if not ollama_service.check_connection():
        raise click.ClickException(
            "Cannot connect to Ollama. Make sure it's running."
        )

    if not LanguageConfig.is_supported(language):
        supported = ", ".join(LanguageConfig.get_supported_languages())
        raise click.ClickException(
            f"Unsupported language: {language}. Supported: {supported}"
        )

    repl = REPL(ollama_service, config)
    repl.current_language = language
    repl.run()
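
# Registration sketch (assumption): this file only defines `repl_cmd`; it is
# presumed to be attached to the package's Click entry point elsewhere, for
# example on a `cli` group:
#
#   from local_code_assistant.commands.repl import repl_cmd
#   cli.add_command(repl_cmd, name="repl")
#
# The `cli` group name and its module location are assumptions, not confirmed
# by this diff.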