Initial upload: Local Code Assistant with CI/CD workflow
.env.example (new file, 23 lines added)
@@ -0,0 +1,23 @@
+# Local Code Assistant Configuration
+# Copy this file to .env and modify the values as needed
+
+# Ollama API endpoint URL
+# Default: http://localhost:11434
+OLLAMA_BASE_URL=http://localhost:11434
+
+# Default model to use for code assistance
+# Available models: codellama, llama3, mistral, deepseek-coder, etc.
+# Run 'ollama list' to see available models
+OLLAMA_MODEL=codellama
+
+# Request timeout in milliseconds
+OLLAMA_TIMEOUT=8000
+
+# Path to user configuration file
+CONFIG_PATH=~/.config/local-code-assistant/config.yaml
+
+# Enable verbose logging
+VERBOSE=false
+
+# Enable streaming responses
+STREAMING=true
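
The commit itself does not show how these variables are consumed, so here is a minimal loader sketch to clarify the intended shape. It assumes Python with the python-dotenv and requests packages; `load_settings` and `list_models` are illustrative names, not part of the repository. `/api/tags` is the real Ollama endpoint behind `ollama list`, and the timeout unit is assumed to be milliseconds per the comment above.

```python
# Hypothetical loader -- illustrative sketch, not part of this commit.
import os

import requests
from dotenv import load_dotenv  # pip install python-dotenv


def load_settings() -> dict:
    """Read the variables defined in .env.example (defaults mirror the file)."""
    load_dotenv()  # picks up .env in the current working directory, if present
    return {
        "base_url": os.getenv("OLLAMA_BASE_URL", "http://localhost:11434"),
        "model": os.getenv("OLLAMA_MODEL", "codellama"),
        # Assumed milliseconds; converted to seconds for the HTTP client.
        "timeout_s": int(os.getenv("OLLAMA_TIMEOUT", "8000")) / 1000,
        # dotenv does not expand '~', so do it explicitly.
        "config_path": os.path.expanduser(
            os.getenv("CONFIG_PATH", "~/.config/local-code-assistant/config.yaml")
        ),
        "verbose": os.getenv("VERBOSE", "false").lower() == "true",
        "streaming": os.getenv("STREAMING", "true").lower() == "true",
    }


def list_models(base_url: str) -> list[str]:
    """Equivalent of 'ollama list': GET /api/tags returns locally installed models."""
    resp = requests.get(f"{base_url}/api/tags", timeout=10)
    resp.raise_for_status()
    return [m["name"] for m in resp.json().get("models", [])]


if __name__ == "__main__":
    settings = load_settings()
    print(list_models(settings["base_url"]))
```

Note the explicit `os.path.expanduser` call: values read from a .env file are plain strings, so the `~` in CONFIG_PATH is not expanded automatically.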
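Likewise, the STREAMING flag maps naturally onto Ollama's streaming generate API, which returns one JSON object per line until `done` is true. A hedged sketch, assuming the settings shape above; `generate` is an illustrative name:

```python
# Hypothetical streaming call -- illustrative sketch, not part of this commit.
import json

import requests


def generate(base_url: str, model: str, prompt: str, stream: bool = True) -> str:
    """POST /api/generate; with "stream": true Ollama returns NDJSON chunks."""
    resp = requests.post(
        f"{base_url}/api/generate",
        json={"model": model, "prompt": prompt, "stream": stream},
        stream=stream,
        timeout=60,
    )
    resp.raise_for_status()
    if not stream:
        # Non-streaming responses arrive as a single JSON object.
        return resp.json()["response"]
    parts = []
    for line in resp.iter_lines():
        if not line:
            continue
        chunk = json.loads(line)
        parts.append(chunk.get("response", ""))
        if chunk.get("done"):
            break
    return "".join(parts)


print(generate("http://localhost:11434", "codellama", "Write a hello world in C."))
```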