mirror of
https://github.com/dalekurt/local-llm-stack.git
synced 2026-02-26 14:21:55 +00:00
Initial commit
This commit is contained in:
35
.env.sample
Normal file
35
.env.sample
Normal file
@@ -0,0 +1,35 @@
# PostgreSQL Configuration
POSTGRES_USER=your_postgres_username
POSTGRES_PASSWORD=your_secure_password
POSTGRES_DB=n8n

# n8n Security Keys
N8N_ENCRYPTION_KEY=your-encryption-key-here
N8N_USER_MANAGEMENT_JWT_SECRET=your-jwt-secret-here
N8N_DIAGNOSTICS_ENABLED=false
N8N_PERSONALIZATION_ENABLED=false

# Flowise Configuration
FLOWISE_PORT=3001

# AnythingLLM Configuration
STORAGE_DIR=/app/server/storage
JWT_SECRET=your_jwt_secret_at_least_20_characters_long
LLM_PROVIDER=ollama
# For native Ollama installation, use host.docker.internal
# For containerized Ollama, use: http://ollama:11434
OLLAMA_BASE_PATH=http://host.docker.internal:11434
OLLAMA_MODEL_PREF=llama2
OLLAMA_MODEL_TOKEN_LIMIT=4096
EMBEDDING_ENGINE=ollama
# For native Ollama installation, use host.docker.internal
# For containerized Ollama, use: http://ollama:11434
EMBEDDING_BASE_PATH=http://host.docker.internal:11434
EMBEDDING_MODEL_PREF=nomic-embed-text:latest
EMBEDDING_MODEL_MAX_CHUNK_LENGTH=8192
VECTOR_DB=qdrant
WHISPER_PROVIDER=local
TTS_PROVIDER=native
PASSWORDMINCHAR=8
QDRANT_ENDPOINT=http://qdrant:6333
QDRANT_COLLECTION=anythingllm
Reference in New Issue
Block a user