diff --git a/.env b/.env
new file mode 100644
index 0000000..74e7247
--- /dev/null
+++ b/.env
@@ -0,0 +1,36 @@
+# PostgreSQL Configuration
+POSTGRES_USER=your_postgres_username
+POSTGRES_PASSWORD=your_secure_password
+POSTGRES_DB=n8n
+
+# n8n Security Keys
+N8N_ENCRYPTION_KEY=your-encryption-key-here
+N8N_USER_MANAGEMENT_JWT_SECRET=your-jwt-secret-here
+N8N_DIAGNOSTICS_ENABLED=false
+N8N_PERSONALIZATION_ENABLED=false
+
+# Flowise Configuration
+FLOWISE_PORT=3001
+
+# AnythingLLM Configuration
+STORAGE_DIR=/app/server/storage
+JWT_SECRET=your_jwt_secret_at_least_20_characters_long
+LLM_PROVIDER=ollama
+# For native Ollama installation, use host.docker.internal
+# For containerized Ollama, use: http://ollama:11434
+OLLAMA_BASE_PATH=http://host.docker.internal:11434
+OLLAMA_MODEL_PREF=qwen3-vl:8b
+OLLAMA_MODEL_TOKEN_LIMIT=4096
+EMBEDDING_ENGINE=ollama
+# For native Ollama installation, use host.docker.internal
+# For containerized Ollama, use: http://ollama:11434
+EMBEDDING_BASE_PATH=http://host.docker.internal:11434
+EMBEDDING_MODEL_PREF=nomic-embed-text:latest
+EMBEDDING_MODEL_MAX_CHUNK_LENGTH=8192
+VECTOR_DB=qdrant
+WHISPER_PROVIDER=local
+TTS_PROVIDER=native
+PASSWORDMINCHAR=8
+QDRANT_ENDPOINT=http://qdrant:6333
+QDRANT_COLLECTION=anythingllm
+CORS_ALLOW_ORIGIN=*
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..288d115
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,194 @@
+networks:
+  ai-network:
+    driver: bridge
+
+# Declare every named volume used by the services below
+volumes:
+  postgres_storage: {}
+  ollama_storage: {}
+  qdrant_storage: {}
+  n8n_storage: {}
+
+# Shared n8n template so the two n8n services stay in sync
+x-n8n: &service-n8n
+  image: n8nio/n8n:latest
+  networks: [ 'ai-network' ]
+  environment:
+    - DB_TYPE=postgresdb
+    - DB_POSTGRESDB_HOST=postgres
+    - DB_POSTGRESDB_PORT=5432
+    - DB_POSTGRESDB_USER=${POSTGRES_USER}
+    - DB_POSTGRESDB_PASSWORD=${POSTGRES_PASSWORD}
+    - DB_POSTGRESDB_DATABASE=${POSTGRES_DB}
+    - N8N_DIAGNOSTICS_ENABLED=${N8N_DIAGNOSTICS_ENABLED}
+    - N8N_PERSONALIZATION_ENABLED=${N8N_PERSONALIZATION_ENABLED}
+    - N8N_ENCRYPTION_KEY=${N8N_ENCRYPTION_KEY}
+    - N8N_USER_MANAGEMENT_JWT_SECRET=${N8N_USER_MANAGEMENT_JWT_SECRET}
+
+services:
+  # --- DATABASES ---
+  postgres:
+    image: postgres:16-alpine
+    container_name: postgres
+    networks: [ 'ai-network' ]
+    restart: unless-stopped
+    ports:
+      - "5432:5432"
+    environment:
+      - POSTGRES_USER=${POSTGRES_USER}
+      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
+      - POSTGRES_DB=${POSTGRES_DB}
+    volumes:
+      - postgres_storage:/var/lib/postgresql/data
+    healthcheck:
+      test: ['CMD-SHELL', 'pg_isready -h localhost -U $${POSTGRES_USER} -d $${POSTGRES_DB}']
+      interval: 5s
+      timeout: 5s
+      retries: 10
+
+  qdrant:
+    image: qdrant/qdrant:latest
+    container_name: qdrant
+    networks: [ 'ai-network' ]
+    restart: unless-stopped
+    ports:
+      - "6333:6333"
+    volumes:
+      - qdrant_storage:/qdrant/storage
+
+  # --- AI ENGINES ---
+  ollama:
+    image: ollama/ollama:latest
+    container_name: ollama
+    networks: [ 'ai-network' ]
+    restart: unless-stopped
+    ports:
+      - "11434:11434"
+    volumes:
+      - ollama_storage:/root/.ollama
+    # --- NVIDIA GPU access ---
+    deploy:
+      resources:
+        reservations:
+          devices:
+            - driver: nvidia
+              count: 1 # use one GPU
+              capabilities: [gpu]
+
+  # --- UIs AND AGENTS ---
+  open-webui:
+    image: ghcr.io/open-webui/open-webui:latest
+    container_name: open-webui
+    networks: [ 'ai-network' ]
+    restart: unless-stopped
+    ports:
+      - "11500:8080"
+    extra_hosts:
+      - "host.docker.internal:host-gateway"
+    environment:
+      - OLLAMA_BASE_URL=http://ollama:11434
+    volumes:
+      - ./data/openwebui:/app/backend/data
+    depends_on:
+      - ollama
+
+  flowise:
+    image: flowiseai/flowise:latest
+    container_name: flowise
+    networks: [ 'ai-network' ]
+    restart: unless-stopped
+    ports:
+      - "${FLOWISE_PORT:-3001}:3001"
+    environment:
+      - PORT=3001
+      - DATABASE_PATH=/root/.flowise
+      - APIKEY_PATH=/root/.flowise
+    extra_hosts:
+      - "host.docker.internal:host-gateway"
+    volumes:
+      - ./data/flowise:/root/.flowise
+
+  anythingllm:
+    image: mintplexlabs/anythingllm:latest
+    container_name: anythingllm
+    networks: [ 'ai-network' ]
+    restart: unless-stopped
+    ports:
+      - "3002:3001"
+    cap_add:
+      - SYS_ADMIN
+    # Needed on Linux so the host.docker.internal defaults from .env resolve
+    extra_hosts:
+      - "host.docker.internal:host-gateway"
+    # NOTE: values from .env are only used for ${...} interpolation here;
+    # they must be passed through explicitly to reach the container.
+    environment:
+      - STORAGE_DIR=${STORAGE_DIR}
+      - JWT_SECRET=${JWT_SECRET}
+      - LLM_PROVIDER=${LLM_PROVIDER}
+      - OLLAMA_BASE_PATH=${OLLAMA_BASE_PATH}
+      - OLLAMA_MODEL_PREF=${OLLAMA_MODEL_PREF}
+      - OLLAMA_MODEL_TOKEN_LIMIT=${OLLAMA_MODEL_TOKEN_LIMIT}
+      - EMBEDDING_ENGINE=${EMBEDDING_ENGINE}
+      - EMBEDDING_BASE_PATH=${EMBEDDING_BASE_PATH}
+      - EMBEDDING_MODEL_PREF=${EMBEDDING_MODEL_PREF}
+      - EMBEDDING_MODEL_MAX_CHUNK_LENGTH=${EMBEDDING_MODEL_MAX_CHUNK_LENGTH}
+      - VECTOR_DB=${VECTOR_DB}
+      - WHISPER_PROVIDER=${WHISPER_PROVIDER}
+      - TTS_PROVIDER=${TTS_PROVIDER}
+      - PASSWORDMINCHAR=${PASSWORDMINCHAR}
+      - QDRANT_ENDPOINT=${QDRANT_ENDPOINT}
+      - QDRANT_COLLECTION=${QDRANT_COLLECTION}
+      - CORS_ALLOW_ORIGIN=${CORS_ALLOW_ORIGIN}
+    volumes:
+      - ./data/anythingllm:/app/server/storage
+    depends_on:
+      - qdrant
+
+  # --- N8N AUTOMATION ---
+  n8n-import:
+    <<: *service-n8n
+    container_name: n8n-import
+    entrypoint: /bin/sh
+    command:
+      - "-c"
+      - "n8n import:credentials --separate --input=/backup/credentials && n8n import:workflow --separate --input=/backup/workflows"
+    volumes:
+      - ./n8n/backup:/backup
+    depends_on:
+      postgres:
+        condition: service_healthy
+
+  n8n:
+    <<: *service-n8n
+    container_name: n8n
+    restart: unless-stopped
+    ports:
+      - "5678:5678"
+    volumes:
+      - n8n_storage:/home/node/.n8n
+      - ./n8n/backup:/backup
+      - ./shared:/data/shared
+    depends_on:
+      postgres:
+        condition: service_healthy
+      n8n-import:
+        condition: service_completed_successfully
+
+  # --- SUPPORT SERVICES ---
+  pipelines:
+    image: ghcr.io/open-webui/pipelines:main
+    container_name: pipelines
+    networks: [ 'ai-network' ]
+    restart: unless-stopped
+    volumes:
+      - ./data/pipelines:/app/pipelines
+    environment:
+      - PIPELINES_API_KEY=${PIPELINES_API_KEY:-0p3n-w3bu!}
+
+  watchtower:
+    image: containrrr/watchtower
+    container_name: watchtower
+    networks: [ 'ai-network' ]
+    restart: unless-stopped
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock
+    command: --interval 3600 --cleanup --monitor-only