fix: resolve CI workflow configuration issues
local_code_assistant/.env.example (24 changed lines)

@@ -1,23 +1 @@
# Local Code Assistant Configuration
# Copy this file to .env and modify the values as needed

# Ollama API endpoint URL
# Default: http://localhost:11434
OLLAMA_BASE_URL=http://localhost:11434

# Default model to use for code assistance
# Available models: codellama, llama3, mistral, deepseek-coder, etc.
# Run 'ollama list' to see available models
OLLAMA_MODEL=codellama

# Request timeout in seconds
OLLAMA_TIMEOUT=8000

# Path to user configuration file
CONFIG_PATH=~/.config/local-code-assistant/config.yaml

# Enable verbose logging
VERBOSE=false

# Enable streaming responses
STREAMING=true
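For context, a minimal sketch of how a tool might consume these variables, assuming it loads them with python-dotenv; the Settings dataclass and load_settings() helper are illustrative names, not code from this repository:

# Illustrative only: Settings and load_settings() are not from this repo.
import os
from dataclasses import dataclass

from dotenv import load_dotenv


@dataclass
class Settings:
    base_url: str
    model: str
    timeout: int
    config_path: str
    verbose: bool
    streaming: bool


def load_settings() -> Settings:
    load_dotenv()  # reads a .env file from the current directory, if present
    return Settings(
        base_url=os.getenv("OLLAMA_BASE_URL", "http://localhost:11434"),
        model=os.getenv("OLLAMA_MODEL", "codellama"),
        timeout=int(os.getenv("OLLAMA_TIMEOUT", "8000")),
        config_path=os.path.expanduser(
            os.getenv("CONFIG_PATH", "~/.config/local-code-assistant/config.yaml")
        ),
        verbose=os.getenv("VERBOSE", "false").lower() == "true",
        streaming=os.getenv("STREAMING", "true").lower() == "true",
    )

The defaults mirror the values in the file above, so the sketch behaves the same whether or not a .env file is present.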
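Building on the previous sketch, a hedged example of how OLLAMA_BASE_URL, OLLAMA_MODEL, and STREAMING could drive a call to Ollama's /api/generate endpoint, which emits one JSON object per line when "stream" is true. The generate() helper is illustrative, and note that requests interprets timeout= in seconds, matching the file's comment on OLLAMA_TIMEOUT:

# Illustrative only: generate() is not from this repo; it reuses Settings
# from the previous sketch.
import json

import requests


def generate(settings: Settings, prompt: str) -> str:
    resp = requests.post(
        f"{settings.base_url}/api/generate",
        json={
            "model": settings.model,
            "prompt": prompt,
            "stream": settings.streaming,
        },
        stream=settings.streaming,
        timeout=settings.timeout,
    )
    resp.raise_for_status()
    if not settings.streaming:
        # Non-streaming responses carry the full completion in one object.
        return resp.json()["response"]
    chunks = []
    # Streaming responses are newline-delimited JSON: each object carries a
    # "response" text fragment, and the last one sets "done" to true.
    for line in resp.iter_lines():
        if not line:
            continue
        part = json.loads(line)
        chunks.append(part.get("response", ""))
        if part.get("done"):
            break
    return "".join(chunks)


# Example usage:
# print(generate(load_settings(), "Explain what this .env file configures."))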