# local-llm-stack/.env.sample
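# Copy to .env and fill in the values below before starting the stack (e.g.: cp .env.sample .env)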
# PostgreSQL Configuration
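# Note: the Postgres image applies these values only when its data volume is first
# initialized; changing them later requires recreating that volume.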
POSTGRES_USER=your_postgres_username
POSTGRES_PASSWORD=your_secure_password
POSTGRES_DB=n8n

# n8n Security Keys
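# Use long random strings; e.g. generate each with: openssl rand -hex 32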
N8N_ENCRYPTION_KEY=your-encryption-key-here
N8N_USER_MANAGEMENT_JWT_SECRET=your-jwt-secret-here
N8N_DIAGNOSTICS_ENABLED=false
N8N_PERSONALIZATION_ENABLED=false

# Flowise Configuration
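# Port used to expose the Flowise UI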
FLOWISE_PORT=3001

# AnythingLLM Configuration
STORAGE_DIR=/app/server/storage
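# Any random string of 20+ characters, e.g. generate with: openssl rand -base64 24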
JWT_SECRET=your_jwt_secret_at_least_20_characters_long
LLM_PROVIDER=ollama
# For Ollama running natively on the host, use: http://host.docker.internal:11434
# For Ollama running as a container in this stack, use: http://ollama:11434
OLLAMA_BASE_PATH=http://host.docker.internal:11434
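# The chat model must already be available in Ollama, e.g.: ollama pull llama2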
OLLAMA_MODEL_PREF=llama2
OLLAMA_MODEL_TOKEN_LIMIT=4096
EMBEDDING_ENGINE=ollama
# For Ollama running natively on the host, use: http://host.docker.internal:11434
# For Ollama running as a container in this stack, use: http://ollama:11434
EMBEDDING_BASE_PATH=http://host.docker.internal:11434
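# The embedding model must also be pulled in Ollama, e.g.: ollama pull nomic-embed-text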
EMBEDDING_MODEL_PREF=nomic-embed-text:latest
EMBEDDING_MODEL_MAX_CHUNK_LENGTH=8192
VECTOR_DB=qdrant
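# Speech-to-text / text-to-speech providers and minimum password length for AnythingLLM users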
WHISPER_PROVIDER=local
TTS_PROVIDER=native
PASSWORDMINCHAR=8
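# Qdrant connection (assumes a service named qdrant on the shared Docker network)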
QDRANT_ENDPOINT=http://qdrant:6333
QDRANT_COLLECTION=anythingllm