From cd520eaaac20e28f370ae82ab8619909e902d3e8 Mon Sep 17 00:00:00 2001 From: 7000pctAUTO Date: Sat, 31 Jan 2026 15:24:55 +0000 Subject: [PATCH] Initial upload: Local Code Assistant with CI/CD workflow --- .env.example | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 .env.example diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..0a90382 --- /dev/null +++ b/.env.example @@ -0,0 +1,23 @@ +# Local Code Assistant Configuration +# Copy this file to .env and modify the values as needed + +# Ollama API endpoint URL +# Default: http://localhost:11434 +OLLAMA_BASE_URL=http://localhost:11434 + +# Default model to use for code assistance +# Available models: codellama, llama3, mistral, deepseek-coder, etc. +# Run 'ollama list' to see available models +OLLAMA_MODEL=codellama + +# Request timeout in seconds +OLLAMA_TIMEOUT=120 + +# Path to user configuration file +CONFIG_PATH=~/.config/local-code-assistant/config.yaml + +# Enable verbose logging +VERBOSE=false + +# Enable streaming responses +STREAMING=true \ No newline at end of file