Add CLI commands and Gitea Actions workflow

commit f9a9bfa43f
parent 0615ee93c8
Date: 2026-01-31 15:27:57 +00:00

@@ -0,0 +1,213 @@
"""Generate command for Local Code Assistant."""
from pathlib import Path
from typing import Optional
import click
from rich.console import Console
from rich.panel import Panel
from rich.syntax import Syntax
from local_code_assistant.commands.base import BaseCommand
from local_code_assistant.prompts.templates import LanguageConfig, PromptTemplates
from local_code_assistant.services.ollama import OllamaService
console = Console()
class GenerateCommand(BaseCommand):
"""Command for generating code from natural language prompts."""
def __init__(self, ollama: OllamaService, config):
"""Initialize generate command.
Args:
ollama: Ollama service instance.
config: Configuration service instance.
"""
super().__init__(ollama, config)
self.supported_languages = LanguageConfig.get_supported_languages()
def run(
self,
prompt: str,
language: str,
output: Optional[Path] = None,
clipboard: bool = False,
model: Optional[str] = None,
temperature: Optional[float] = None
) -> str:
"""Execute code generation.
Args:
prompt: Natural language description of code to generate.
language: Programming language.
output: Optional output file path.
clipboard: Whether to copy to clipboard.
model: Model to use.
temperature: Temperature for generation.
Returns:
Generated code.
"""
if not self.ollama.check_connection():
raise click.ClickException(
"Cannot connect to Ollama. Make sure it's running."
)
if language not in self.supported_languages:
supported = ", ".join(self.supported_languages)
raise click.ClickException(
f"Unsupported language: {language}. Supported: {supported}"
)
model = model or self.config.ollama_model
temperature_val = temperature if temperature is not None else self.config.temperature
console.print(f"[dim]Generating {language} code...[/dim]")
full_prompt = PromptTemplates.code_generation(
language=language,
user_prompt=prompt,
files_context=None
)
system_prompt = PromptTemplates.build_system_prompt()
try:
generated_code = self.ollama.generate(
prompt=full_prompt,
model=model,
system=system_prompt,
temperature=temperature_val
)
self._display_output(generated_code, language)
if output:
output.write_text(generated_code)
console.print(f"[green]Code written to {output}[/green]")
if clipboard:
import pyperclip
pyperclip.copy(generated_code)
console.print("[green]Code copied to clipboard[/green]")
return generated_code
except Exception as e:
raise click.ClickException(f"Generation failed: {str(e)}") from e
def _display_output(self, code: str, language: str):
"""Display generated code with syntax highlighting.
Args:
code: Generated code.
language: Programming language.
"""
if self.config.syntax_highlighting:
syntax = Syntax(code, language, line_numbers=True)
console.print(Panel(syntax, title="Generated Code"))
else:
console.print(Panel(code, title="Generated Code"))
def generate(
ctx: click.Context,
prompt: str,
language: str,
output: Optional[Path] = None,
clipboard: bool = False,
model: Optional[str] = None,
temperature: Optional[float] = None
):
"""Generate code from natural language prompt.
Args:
ctx: Click context.
prompt: Description of code to generate.
language: Programming language.
output: Output file path.
clipboard: Copy to clipboard.
model: Model to use.
temperature: Generation temperature.
"""
config = ctx.obj["config"]
ollama = ctx.obj["ollama_service"]
command = GenerateCommand(ollama, config)
command.run(
prompt=prompt,
language=language,
output=output,
clipboard=clipboard,
model=model,
temperature=temperature
)
@click.command()
@click.argument("prompt", type=click.STRING)
@click.option(
"--language", "-l",
type=click.Choice(LanguageConfig.get_supported_languages()),
default="python",
help="Programming language for generated code"
)
@click.option(
"--output", "-o",
type=click.Path(path_type=Path),
help="Write generated code to file"
)
@click.option(
"--clipboard/--no-clipboard",
default=False,
help="Copy generated code to clipboard"
)
@click.option(
"--model", "-m",
help="Model to use for generation"
)
@click.option(
"--temperature", "-t",
type=float,
help="Temperature for generation (0.0-1.0)"
)
@click.pass_context
def generate_cmd(
ctx: click.Context,
prompt: str,
language: str,
output: Optional[Path],
clipboard: bool,
model: Optional[str],
temperature: Optional[float]
):
"""Generate code from a natural language description.
Example:
local-code-assistant generate "a function to calculate fibonacci" --language python
\f
Args:
ctx: Click context.
prompt: Description of code to generate.
language: Programming language.
output: Output file path.
clipboard: Copy to clipboard.
model: Model to use.
temperature: Generation temperature.
"""
config = ctx.obj["config"]
ollama_service = OllamaService(config)
ctx.obj["ollama_service"] = ollama_service
command = GenerateCommand(ollama_service, config)
command.run(
prompt=prompt,
language=language,
output=output,
clipboard=clipboard,
model=model,
temperature=temperature
)
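
The command above wires Ollama access, the prompt templates, and Rich output behind a single click entry point. Below is a minimal sketch, not part of this commit, of how generate_cmd could be exercised with click's CliRunner: OllamaService is patched out, the module path local_code_assistant.commands.generate is inferred from the imports above, and the mocked config exposes only the attributes the command reads (ollama_model, temperature, syntax_highlighting).

# test_generate_cmd.py -- hypothetical test sketch, not part of this commit.
# OllamaService is patched so no local Ollama instance is required; the config
# object is a Mock exposing only the attributes GenerateCommand reads.
from unittest import mock

from click.testing import CliRunner

from local_code_assistant.commands.generate import generate_cmd  # assumed module path


def test_generate_cmd_happy_path():
    runner = CliRunner()
    fake_config = mock.Mock(
        ollama_model="llama3",      # assumed model name
        temperature=0.2,
        syntax_highlighting=False,  # plain Panel output instead of Syntax
    )
    with mock.patch(
        "local_code_assistant.commands.generate.OllamaService"
    ) as service_cls:
        service = service_cls.return_value
        service.check_connection.return_value = True
        service.generate.return_value = "def add(a, b):\n    return a + b\n"

        result = runner.invoke(
            generate_cmd,
            ["a function that adds two numbers", "--language", "python"],
            obj={"config": fake_config},
        )

    assert result.exit_code == 0, result.output
    service.generate.assert_called_once()

Patching the service keeps the test hermetic: no model has to be pulled, and the failure branches (no connection, unsupported language) can be covered the same way by asserting on result.exit_code.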