Files
local-llm-stack/docker-compose.yml

178 lines
4.6 KiB
YAML

# NOTE(review): the top-level `version` attribute is obsolete and ignored by
# Compose v2; kept only for compatibility with legacy docker-compose v1 tooling.
version: '3.8'
# Single shared bridge network so every service can reach the others by name.
networks:
  ai-network:
    driver: bridge
# Declare all named volumes used by the services below.
volumes:
  postgres_storage: {}
  ollama_storage: {}
  qdrant_storage: {}
  n8n_storage: {}
# Shared template for the n8n services (merged via `<<: *service-n8n`)
# so the common image/network/environment config is not duplicated.
x-n8n: &service-n8n
  image: n8nio/n8n:latest
  networks: ['ai-network']
  environment:
    # Persist n8n data in Postgres instead of the default SQLite.
    - DB_TYPE=postgresdb
    - DB_POSTGRESDB_HOST=postgres
    - DB_POSTGRESDB_PORT=5432
    - DB_POSTGRESDB_USER=${POSTGRES_USER}
    - DB_POSTGRESDB_PASSWORD=${POSTGRES_PASSWORD}
    - DB_POSTGRESDB_DATABASE=${POSTGRES_DB}
    - N8N_DIAGNOSTICS_ENABLED=${N8N_DIAGNOSTICS_ENABLED}
    - N8N_PERSONALIZATION_ENABLED=${N8N_PERSONALIZATION_ENABLED}
    # Must stay stable across restarts or stored credentials become unreadable.
    - N8N_ENCRYPTION_KEY=${N8N_ENCRYPTION_KEY}
    - N8N_USER_MANAGEMENT_JWT_SECRET=${N8N_USER_MANAGEMENT_JWT_SECRET}
services:
  # --- DATABASES ---
  postgres:
    image: postgres:16-alpine
    container_name: postgres
    networks: ['ai-network']
    restart: unless-stopped
    ports:
      - "5432:5432"
    environment:
      - POSTGRES_USER=${POSTGRES_USER}
      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
      - POSTGRES_DB=${POSTGRES_DB}
    volumes:
      - postgres_storage:/var/lib/postgresql/data
    healthcheck:
      # $$ escapes the dollar sign so the variables are expanded by the shell
      # inside the container, not by Compose-file interpolation.
      test: ['CMD-SHELL', 'pg_isready -h localhost -U $${POSTGRES_USER} -d $${POSTGRES_DB}']
      interval: 5s
      timeout: 5s
      retries: 10
qdrant:
image: qdrant/qdrant:latest
container_name: qdrant
networks: [ 'ai-network' ]
restart: unless-stopped
ports:
- "6333:6333"
volumes:
- qdrant_storage:/qdrant/storage
# --- AI ENGINES ---
ollama:
image: ollama/ollama:latest
container_name: ollama
networks: [ 'ai-network' ]
restart: unless-stopped
ports:
- "11434:11434"
volumes:
- ollama_storage:/root/.ollama
# --- СЕКЦИЯ ДЛЯ РАБОТЫ GPU ---
deploy:
resources:
reservations:
devices:
- driver: nvidia
count: 1 # Используем 1 видеокарту
capabilities: [gpu]
# --- ИНТЕРФЕЙСЫ И АГЕНТЫ ---
open-webui:
image: ghcr.io/open-webui/open-webui:latest
container_name: open-webui
networks: [ 'ai-network' ]
restart: unless-stopped
ports:
- "11500:8080"
extra_hosts:
- "host.docker.internal:host-gateway"
environment:
- OLLAMA_BASE_URL=http://ollama:11434
volumes:
- ./data/openwebui:/app/backend/data
depends_on:
- ollama
flowise:
image: flowiseai/flowise:latest
container_name: flowise
networks: [ 'ai-network' ]
restart: unless-stopped
ports:
- "${FLOWISE_PORT:-3001}:3001"
environment:
- PORT=3001
- DATABASE_PATH=/root/.flowise
- APIKEY_PATH=/root/.flowise
extra_hosts:
- "host.docker.internal:host-gateway"
volumes:
- ./data/flowise:/root/.flowise
anythingllm:
image: mintplexlabs/anythingllm:latest
container_name: anythingllm
networks: [ 'ai-network' ]
restart: unless-stopped
ports:
- "3002:3001"
cap_add:
- SYS_ADMIN
environment:
- STORAGE_DIR=/app/server/storage
- JWT_SECRET=${JWT_SECRET}
volumes:
- ./data/anythingllm:/app/server/storage
depends_on:
- qdrant
# --- АВТОМАТИЗАЦИЯ N8N ---
n8n-import:
<<: *service-n8n
container_name: n8n-import
entrypoint: /bin/sh
command:
- "-c"
- "n8n import:credentials --separate --input=/backup/credentials && n8n import:workflow --separate --input=/backup/workflows"
volumes:
- ./n8n/backup:/backup
depends_on:
postgres:
condition: service_healthy
n8n:
<<: *service-n8n
container_name: n8n
restart: unless-stopped
ports:
- "5678:5678"
volumes:
- n8n_storage:/home/node/.n8n
- ./n8n/backup:/backup
- ./shared:/data/shared
depends_on:
postgres:
condition: service_healthy
n8n-import:
condition: service_completed_successfully
# --- ВСПОМОГАТЕЛЬНЫЕ СЕРВИСЫ ---
pipelines:
image: ghcr.io/open-webui/pipelines:main
container_name: pipelines
networks: [ 'ai-network' ]
restart: unless-stopped
volumes:
- ./data/pipelines:/app/pipelines
environment:
- PIPELINES_API_KEY=${PIPELINES_API_KEY:-0p3n-w3bu!}
watchtower:
image: containrrr/watchtower
container_name: watchtower
networks: [ 'ai-network' ]
restart: unless-stopped
volumes:
- /var/run/docker.sock:/var/run/docker.sock
command: --interval 3600 --cleanup --monitor-only