fix: resolve CI workflow configuration issues
Some checks failed
CI / build (push) Has been cancelled
CI / test (push) Has been cancelled

This commit is contained in:
2026-01-31 15:57:24 +00:00
parent 71934c7173
commit 56d23ec89b

View File

@@ -1,23 +1 @@
# Local Code Assistant Configuration (local_code_assistant/.env.example)
# Copy this file to .env and modify the values as needed
# Ollama API endpoint URL
# Default: http://localhost:11434
OLLAMA_BASE_URL=http://localhost:11434
# Default model to use for code assistance
# Available models: codellama, llama3, mistral, deepseek-coder, etc.
# Run 'ollama list' to see available models
OLLAMA_MODEL=codellama
# Request timeout in seconds
# NOTE(review): 8000 seconds is ~2.2 hours — confirm whether this value is
# meant to be milliseconds, or whether a smaller seconds value (e.g. 120) was intended
OLLAMA_TIMEOUT=8000
# Path to user configuration file
CONFIG_PATH=~/.config/local-code-assistant/config.yaml
# Enable verbose logging
VERBOSE=false
# Enable streaming responses
STREAMING=true