---
# Local AI Commit Reviewer - Example Configuration
# Copy this file to .aicr.yaml in your project root

# LLM Configuration
llm:
  # Ollama endpoint
  endpoint: "http://localhost:11434"
  # Model to use for reviews
  model: "codellama"
  # Request timeout in seconds
  timeout: 120
  # Maximum tokens to generate
  max_tokens: 2048
  # Temperature (0.0-1.0)
  temperature: 0.3

# Review Settings
review:
  # Strictness: permissive, balanced, strict
  strictness: "balanced"
  # Maximum issues per file
  max_issues_per_file: 20
  # Enable syntax highlighting
  syntax_highlighting: true
  # Show line numbers
  show_line_numbers: true

# Language-specific configurations
languages:
  python:
    enabled: true
    review_rules:
      - "pep8"
      - "type-hints"
      - "docstrings"
  javascript:
    enabled: true
    review_rules:
      - "airbnb"
  typescript:
    enabled: true
    review_rules:
      - "airbnb"
  go:
    enabled: true
    review_rules:
      - "golint"
      - "staticcheck"
  rust:
    enabled: true
    review_rules:
      - "clippy"
  java:
    enabled: true
    review_rules:
      - "google-java"
  c:
    enabled: true
    review_rules:
      - "cppcheck"
  cpp:
    enabled: true
    review_rules:
      - "cppcheck"

# Git Hook Configuration
hooks:
  enabled: true
  fail_on_critical: true
  allow_bypass: true

# Output Configuration
output:
  format: "terminal"
  theme: "auto"
  show_suggestions: true

# Logging Configuration
logging:
  level: "info"
  # Empty string means log to stderr only — NOTE(review): confirm against the tool's docs
  log_file: ""
  structured: false