Adding Langfuse instructions for cloud deployment of the local AI stack

This commit is contained in:
Cole Medin
2025-04-24 19:53:39 -05:00
parent 2c58bf12af
commit 531a3c0e22
4 changed files with 9 additions and 1 deletions

View File

@@ -59,6 +59,7 @@ ENCRYPTION_KEY=generate-with-openssl # generate via `openssl rand -hex 32`
# WEBUI_HOSTNAME=openwebui.yourdomain.com
# FLOWISE_HOSTNAME=flowise.yourdomain.com
# SUPABASE_HOSTNAME=supabase.yourdomain.com
# LANGFUSE_HOSTNAME=langfuse.yourdomain.com
# OLLAMA_HOSTNAME=ollama.yourdomain.com
# SEARXNG_HOSTNAME=searxng.yourdomain.com
# LETSENCRYPT_EMAIL=internal

View File

@@ -20,6 +20,11 @@
reverse_proxy localhost:3001
}
# Langfuse
{$LANGFUSE_HOSTNAME} {
reverse_proxy localhost:3002
}
# Ollama API
{$OLLAMA_HOSTNAME} {
reverse_proxy localhost:11434

View File

@@ -191,7 +191,8 @@ Before running the above commands to pull the repo and install everything:
1. Run the commands as root to open up the necessary ports:
- ufw enable
- ufw allow 8000 && ufw allow 3001 && ufw allow 3000 && ufw allow 5678 && ufw allow 80 && ufw allow 443
- ufw allow 8000 && ufw allow 3000 && ufw allow 5678 && ufw allow 3002 && ufw allow 80 && ufw allow 443
- ufw allow 3001 (if you want to expose Flowise, you will have to set up the [environment variables](https://docs.flowiseai.com/configuration/environment-variables) to enable authentication)
- ufw allow 8080 (if you want to expose SearXNG)
- ufw allow 11434 (if you want to expose Ollama)
- ufw reload

View File

@@ -125,6 +125,7 @@ services:
- OLLAMA_HOSTNAME=${OLLAMA_HOSTNAME:-":8004"}
- SUPABASE_HOSTNAME=${SUPABASE_HOSTNAME:-":8005"}
- SEARXNG_HOSTNAME=${SEARXNG_HOSTNAME:-":8006"}
- LANGFUSE_HOSTNAME=${LANGFUSE_HOSTNAME:-":8007"}
- LETSENCRYPT_EMAIL=${LETSENCRYPT_EMAIL:-internal}
cap_drop:
- ALL