From 98b185dc5279b3422358ad2c5fbb1251c9abf5ff Mon Sep 17 00:00:00 2001
From: 7000pctAUTO
Date: Wed, 4 Feb 2026 17:57:25 +0000
Subject: [PATCH] Add interactive mode module

---
 app/src/git_commit_generator/interactive.py | 136 ++++++++++++++++++++
 1 file changed, 136 insertions(+)
 create mode 100644 app/src/git_commit_generator/interactive.py

diff --git a/app/src/git_commit_generator/interactive.py b/app/src/git_commit_generator/interactive.py
new file mode 100644
index 0000000..2a62ad0
--- /dev/null
+++ b/app/src/git_commit_generator/interactive.py
@@ -0,0 +1,136 @@
+"""Interactive mode for message refinement."""
+from enum import Enum
+from typing import Optional
+
+from rich.console import Console
+from rich.prompt import Prompt
+from rich.text import Text
+
+
+class Action(Enum):
+    """User action options in interactive mode."""
+
+    ACCEPT = "a"
+    EDIT = "e"
+    REGENERATE = "r"
+    QUIT = "q"
+
+
+class InteractiveMode:
+    """Interactive mode handler for message refinement."""
+
+    def __init__(self):
+        """Initialize interactive mode."""
+        self.console = Console()
+
+    def show_message(self, message: str) -> None:
+        """Display a message to the user."""
+        self.console.print(Text(message, style="bold green"))
+
+    def show_diff(self, diff: str) -> None:
+        """Display the diff being analyzed, truncated to 500 characters."""
+        self.console.print("\n[bold]Changes detected:[/bold]")
+        self.console.print(diff[:500] + "..." if len(diff) > 500 else diff)
+
+    def prompt_for_action(self, current_message: str) -> tuple[Action, Optional[str]]:
+        """Prompt user for action; returns (action, edited_message_or_None)."""
+        self.console.print("\n[bold]Generated commit message:[/bold]")
+        self.console.print(f"[cyan]{current_message}[/cyan]")
+
+        self.console.print("\n[bold]Options:[/bold]")
+        self.console.print(" [green](a)[/green]ccept message")
+        self.console.print(" [green](e)[/green]dit message")
+        self.console.print(" [green](r)[/green]egenerate")
+        self.console.print(" [green](q)[/green]uit")
+
+        choice = Prompt.ask(
+            "What would you like to do?",
+            choices=["a", "e", "r", "q"],
+            default="a",
+        ).lower()
+
+        if choice == "a":
+            return Action.ACCEPT, None
+        elif choice == "e":
+            edited = Prompt.ask("Enter new commit message", default=current_message)
+            return Action.ACCEPT, edited
+        elif choice == "r":
+            return Action.REGENERATE, None
+        else:
+            return Action.QUIT, None
+
+    def confirm_commit(self, message: str) -> bool:
+        """Confirm the commit message before proceeding."""
+        self.console.print("\n[bold]Final commit message:[/bold]")
+        self.console.print(f"[green]{message}[/green]")
+
+        confirm = Prompt.ask(
+            "Proceed with commit?",
+            choices=["y", "n"],
+            default="y",
+        ).lower()
+
+        return confirm == "y"
+
+    def show_error(self, error: str) -> None:
+        """Display an error message."""
+        self.console.print(Text(f"Error: {error}", style="bold red"))
+
+    def show_info(self, info: str) -> None:
+        """Display an info message."""
+        self.console.print(Text(info, style="blue"))
+
+    def show_connection_status(
+        self, connected: bool, model: Optional[str] = None
+    ) -> None:
+        """Show Ollama connection status."""
+        if connected:
+            status = "[green]Connected to Ollama[/green]"
+            if model:
+                status += f" (model: {model})"
+            self.console.print(status)
+        else:
+            self.console.print(
+                Text(
+                    "Could not connect to Ollama. Make sure it's running.",
+                    style="bold red",
+                )
+            )
+
+    def show_no_changes_warning(self) -> None:
+        """Show warning when no changes detected."""
+        self.console.print(
+            Text(
+                "No changes detected. Stage some files with 'git add' or use --unstaged flag.",
+                style="bold yellow",
+            )
+        )
+
+    def show_help(self) -> None:
+        """Show help information."""
+        help_text = """
+[bold]Git Commit Message Generator[/bold]
+
+A CLI tool that generates conventional git commit messages using local LLMs.
+
+[bold]Commands:[/bold]
+  generate    Generate a commit message from staged/unstaged changes
+  changelog   Generate CHANGELOG.md from git history
+  config      Configure settings
+
+[bold]Options:[/bold]
+  --unstaged         Include unstaged changes
+  --staged           Include only staged changes (default)
+  --model            Specify which model to use
+  --interactive, -i  Use interactive mode
+
+[bold]Environment Variables:[/bold]
+  OLLAMA_HOST    Ollama server URL (default: http://localhost:11434)
+  OLLAMA_MODEL   Default model to use
+"""
+        self.console.print(help_text)
+
+
+def get_interactive_mode() -> InteractiveMode:
+    """Get InteractiveMode instance."""
+    return InteractiveMode()